/* Scanning of rtl for dataflow analysis.
   Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007,
   2008, 2009, 2010 Free Software Foundation, Inc.
   Originally contributed by Michael P. Hayes
             (m.hayes@elec.canterbury.ac.nz, mhayes@redhat.com)
   Major rewrite contributed by Danny Berlin (dberlin@dberlin.org)
             and Kenneth Zadeck (zadeck@naturalbridge.com).

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "recog.h"
#include "function.h"
#include "regs.h"
#include "output.h"
#include "alloc-pool.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "sbitmap.h"
#include "bitmap.h"
#include "timevar.h"
#include "tree.h"
#include "target.h"
#include "target-def.h"
#include "df.h"
#include "tree-pass.h"
#include "emit-rtl.h"  /* FIXME: Can go away once crtl is moved to rtl.h.  */

DEF_VEC_P(df_ref);
DEF_VEC_ALLOC_P_STACK(df_ref);

#define VEC_df_ref_stack_alloc(alloc) VEC_stack_alloc (df_ref, alloc)

typedef struct df_mw_hardreg *df_mw_hardreg_ptr;

DEF_VEC_P(df_mw_hardreg_ptr);
DEF_VEC_ALLOC_P_STACK(df_mw_hardreg_ptr);

#define VEC_df_mw_hardreg_ptr_stack_alloc(alloc) \
  VEC_stack_alloc (df_mw_hardreg_ptr, alloc)

#ifndef HAVE_epilogue
#define HAVE_epilogue 0
#endif
#ifndef HAVE_prologue
#define HAVE_prologue 0
#endif
#ifndef HAVE_sibcall_epilogue
#define HAVE_sibcall_epilogue 0
#endif

#ifndef EPILOGUE_USES
#define EPILOGUE_USES(REGNO)  0
#endif

/* The following two macros free the vecs that hold either the refs or
   the mw refs.  They are a little tricky because a vec that holds 0
   elements is special and is not to be freed.  */
#define df_scan_free_ref_vec(V) \
  do { \
    if (V && *V) \
      free (V); \
  } while (0)

#define df_scan_free_mws_vec(V) \
  do { \
    if (V && *V) \
      free (V); \
  } while (0)

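/* Editor's note: a short illustration of that convention.  An empty
   vec is the shared one-element sentinel df_null_ref_rec declared
   below, whose single slot is NULL:

     df_ref *empty = df_null_ref_rec;      (*empty == NULL; never freed)
     df_ref *real = XNEWVEC (df_ref, 2);   (heap allocated; freed later)

   which is why the macros above test both V and *V before calling
   free.  */
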
/* The set of hard registers in eliminables[i].from.  */

static HARD_REG_SET elim_reg_set;

/* A collection of the refs and multiword hardregs gathered while
   scanning a single insn or block, before they are installed into the
   main chains.  */

struct df_collection_rec
{
  VEC(df_ref,stack) *def_vec;
  VEC(df_ref,stack) *use_vec;
  VEC(df_ref,stack) *eq_use_vec;
  VEC(df_mw_hardreg_ptr,stack) *mw_vec;
};

static df_ref df_null_ref_rec[1];
static struct df_mw_hardreg * df_null_mw_rec[1];

static void df_ref_record (enum df_ref_class, struct df_collection_rec *,
                           rtx, rtx *,
                           basic_block, struct df_insn_info *,
                           enum df_ref_type, int ref_flags);
static void df_def_record_1 (struct df_collection_rec *, rtx,
                             basic_block, struct df_insn_info *,
                             int ref_flags);
static void df_defs_record (struct df_collection_rec *, rtx,
                            basic_block, struct df_insn_info *,
                            int ref_flags);
static void df_uses_record (struct df_collection_rec *,
                            rtx *, enum df_ref_type,
                            basic_block, struct df_insn_info *,
                            int ref_flags);

static df_ref df_ref_create_structure (enum df_ref_class,
                                       struct df_collection_rec *, rtx, rtx *,
                                       basic_block, struct df_insn_info *,
                                       enum df_ref_type, int ref_flags);
static void df_insn_refs_collect (struct df_collection_rec*,
                                  basic_block, struct df_insn_info *);
static void df_canonize_collection_rec (struct df_collection_rec *);

static void df_get_regular_block_artificial_uses (bitmap);
static void df_get_eh_block_artificial_uses (bitmap);

static void df_record_entry_block_defs (bitmap);
static void df_record_exit_block_uses (bitmap);
static void df_get_exit_block_use_set (bitmap);
static void df_get_entry_block_def_set (bitmap);
static void df_grow_ref_info (struct df_ref_info *, unsigned int);
static void df_ref_chain_delete_du_chain (df_ref *);
static void df_ref_chain_delete (df_ref *);

static void df_refs_add_to_chains (struct df_collection_rec *,
                                   basic_block, rtx);

static bool df_insn_refs_verify (struct df_collection_rec *, basic_block, rtx, bool);
static void df_entry_block_defs_collect (struct df_collection_rec *, bitmap);
static void df_exit_block_uses_collect (struct df_collection_rec *, bitmap);
static void df_install_ref (df_ref, struct df_reg_info *,
                            struct df_ref_info *, bool);

static int df_ref_compare (const void *, const void *);
static int df_mw_compare (const void *, const void *);

/* Indexed by hardware reg number, is true if that register is ever
   used in the current function.

   In df-scan.c, this is set up to record the hard regs used
   explicitly.  Reload adds in the hard regs used for holding pseudo
   regs.  Final uses it to generate the code in the function prologue
   and epilogue to save and restore registers as needed.  */

static bool regs_ever_live[FIRST_PSEUDO_REGISTER];
\f
/*----------------------------------------------------------------------------
   SCANNING DATAFLOW PROBLEM

   There are several ways in which scanning looks just like the other
   dataflow problems.  It shares all of the mechanisms for local info
   as well as basic block info.  Where it differs is when and how often
   it gets run.  It also has no need for the iterative solver.
----------------------------------------------------------------------------*/

/* Problem data for the scanning dataflow function.  */
struct df_scan_problem_data
{
  alloc_pool ref_base_pool;
  alloc_pool ref_artificial_pool;
  alloc_pool ref_regular_pool;
  alloc_pool insn_pool;
  alloc_pool reg_pool;
  alloc_pool mw_reg_pool;
  bitmap_obstack reg_bitmaps;
  bitmap_obstack insn_bitmaps;
};

typedef struct df_scan_bb_info *df_scan_bb_info_t;


/* Internal function to shut down the scanning problem.  */
static void
df_scan_free_internal (void)
{
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  unsigned int i;
  basic_block bb;

  /* The vectors that hold the refs are not pool allocated because
     they come in many sizes.  This makes them impossible to delete
     all at once.  */
  for (i = 0; i < DF_INSN_SIZE(); i++)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_GET(i);
      /* Skip the insns that have no insn_info or have been
         deleted.  */
      if (insn_info)
        {
          df_scan_free_ref_vec (insn_info->defs);
          df_scan_free_ref_vec (insn_info->uses);
          df_scan_free_ref_vec (insn_info->eq_uses);
          df_scan_free_mws_vec (insn_info->mw_hardregs);
        }
    }

  FOR_ALL_BB (bb)
    {
      unsigned int bb_index = bb->index;
      struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
      if (bb_info)
        {
          df_scan_free_ref_vec (bb_info->artificial_defs);
          df_scan_free_ref_vec (bb_info->artificial_uses);
        }
    }

  free (df->def_info.refs);
  free (df->def_info.begin);
  free (df->def_info.count);
  memset (&df->def_info, 0, (sizeof (struct df_ref_info)));

  free (df->use_info.refs);
  free (df->use_info.begin);
  free (df->use_info.count);
  memset (&df->use_info, 0, (sizeof (struct df_ref_info)));

  free (df->def_regs);
  df->def_regs = NULL;
  free (df->use_regs);
  df->use_regs = NULL;
  free (df->eq_use_regs);
  df->eq_use_regs = NULL;
  df->regs_size = 0;
  DF_REG_SIZE(df) = 0;

  free (df->insns);
  df->insns = NULL;
  DF_INSN_SIZE () = 0;

  free (df_scan->block_info);
  df_scan->block_info = NULL;
  df_scan->block_info_size = 0;

  bitmap_clear (&df->hardware_regs_used);
  bitmap_clear (&df->regular_block_artificial_uses);
  bitmap_clear (&df->eh_block_artificial_uses);
  BITMAP_FREE (df->entry_block_defs);
  BITMAP_FREE (df->exit_block_uses);
  bitmap_clear (&df->insns_to_delete);
  bitmap_clear (&df->insns_to_rescan);
  bitmap_clear (&df->insns_to_notes_rescan);

  free_alloc_pool (problem_data->ref_base_pool);
  free_alloc_pool (problem_data->ref_artificial_pool);
  free_alloc_pool (problem_data->ref_regular_pool);
  free_alloc_pool (problem_data->insn_pool);
  free_alloc_pool (problem_data->reg_pool);
  free_alloc_pool (problem_data->mw_reg_pool);
  bitmap_obstack_release (&problem_data->reg_bitmaps);
  bitmap_obstack_release (&problem_data->insn_bitmaps);
  free (df_scan->problem_data);
}

/* Free basic block info.  */

static void
df_scan_free_bb_info (basic_block bb, void *vbb_info)
{
  struct df_scan_bb_info *bb_info = (struct df_scan_bb_info *) vbb_info;
  unsigned int bb_index = bb->index;

  /* See if bb_info is initialized.  */
  if (bb_info->artificial_defs)
    {
      rtx insn;
      FOR_BB_INSNS (bb, insn)
        {
          if (INSN_P (insn))
            /* Record defs within INSN.  */
            df_insn_delete (bb, INSN_UID (insn));
        }

      if (bb_index < df_scan->block_info_size)
        bb_info = df_scan_get_bb_info (bb_index);

      /* Get rid of any artificial uses or defs.  */
      if (bb_info->artificial_defs)
        {
          df_ref_chain_delete_du_chain (bb_info->artificial_defs);
          df_ref_chain_delete_du_chain (bb_info->artificial_uses);
          df_ref_chain_delete (bb_info->artificial_defs);
          df_ref_chain_delete (bb_info->artificial_uses);
          bb_info->artificial_defs = NULL;
          bb_info->artificial_uses = NULL;
        }
    }
}

/* Allocate the problem data for the scanning problem.  This should be
   called when the problem is created or when the entire function is to
   be rescanned.  */
void
df_scan_alloc (bitmap all_blocks ATTRIBUTE_UNUSED)
{
  struct df_scan_problem_data *problem_data;
  unsigned int insn_num = get_max_uid () + 1;
  unsigned int block_size = 400;
  basic_block bb;

  /* Given the number of pools, this is really faster than tearing
     everything apart.  */
  if (df_scan->problem_data)
    df_scan_free_internal ();

  problem_data = XNEW (struct df_scan_problem_data);
  df_scan->problem_data = problem_data;
  df_scan->computed = true;

  problem_data->ref_base_pool
    = create_alloc_pool ("df_scan ref base",
                         sizeof (struct df_base_ref), block_size);
  problem_data->ref_artificial_pool
    = create_alloc_pool ("df_scan ref artificial",
                         sizeof (struct df_artificial_ref), block_size);
  problem_data->ref_regular_pool
    = create_alloc_pool ("df_scan ref regular",
                         sizeof (struct df_regular_ref), block_size);
  problem_data->insn_pool
    = create_alloc_pool ("df_scan insn",
                         sizeof (struct df_insn_info), block_size);
  problem_data->reg_pool
    = create_alloc_pool ("df_scan reg",
                         sizeof (struct df_reg_info), block_size);
  problem_data->mw_reg_pool
    = create_alloc_pool ("df_scan mw_reg",
                         sizeof (struct df_mw_hardreg), block_size);

  bitmap_obstack_initialize (&problem_data->reg_bitmaps);
  bitmap_obstack_initialize (&problem_data->insn_bitmaps);

  insn_num += insn_num / 4;
  df_grow_reg_info ();

  df_grow_insn_info ();
  df_grow_bb_info (df_scan);

  FOR_ALL_BB (bb)
    {
      unsigned int bb_index = bb->index;
      struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb_index);
      bb_info->artificial_defs = NULL;
      bb_info->artificial_uses = NULL;
    }

  bitmap_initialize (&df->hardware_regs_used, &problem_data->reg_bitmaps);
  bitmap_initialize (&df->regular_block_artificial_uses, &problem_data->reg_bitmaps);
  bitmap_initialize (&df->eh_block_artificial_uses, &problem_data->reg_bitmaps);
  df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
  df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
  bitmap_initialize (&df->insns_to_delete, &problem_data->insn_bitmaps);
  bitmap_initialize (&df->insns_to_rescan, &problem_data->insn_bitmaps);
  bitmap_initialize (&df->insns_to_notes_rescan, &problem_data->insn_bitmaps);
  df_scan->optional_p = false;
}


/* Free all of the data associated with the scan problem.  */

static void
df_scan_free (void)
{
  if (df_scan->problem_data)
    df_scan_free_internal ();

  if (df->blocks_to_analyze)
    {
      BITMAP_FREE (df->blocks_to_analyze);
      df->blocks_to_analyze = NULL;
    }

  free (df_scan);
}

/* Dump the preamble for DF_SCAN dump.  */
static void
df_scan_start_dump (FILE *file ATTRIBUTE_UNUSED)
{
  int i;
  int dcount = 0;
  int ucount = 0;
  int ecount = 0;
  int icount = 0;
  int ccount = 0;
  basic_block bb;
  rtx insn;

  fprintf (file, ";; invalidated by call \t");
  df_print_regset (file, regs_invalidated_by_call_regset);
  fprintf (file, ";; hardware regs used \t");
  df_print_regset (file, &df->hardware_regs_used);
  fprintf (file, ";; regular block artificial uses \t");
  df_print_regset (file, &df->regular_block_artificial_uses);
  fprintf (file, ";; eh block artificial uses \t");
  df_print_regset (file, &df->eh_block_artificial_uses);
  fprintf (file, ";; entry block defs \t");
  df_print_regset (file, df->entry_block_defs);
  fprintf (file, ";; exit block uses \t");
  df_print_regset (file, df->exit_block_uses);
  fprintf (file, ";; regs ever live \t");
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    if (df_regs_ever_live_p (i))
      fprintf (file, " %d[%s]", i, reg_names[i]);
  fprintf (file, "\n;; ref usage \t");

  for (i = 0; i < (int)df->regs_inited; i++)
    if (DF_REG_DEF_COUNT (i) || DF_REG_USE_COUNT (i) || DF_REG_EQ_USE_COUNT (i))
      {
        const char * sep = "";

        fprintf (file, "r%d={", i);
        if (DF_REG_DEF_COUNT (i))
          {
            fprintf (file, "%dd", DF_REG_DEF_COUNT (i));
            sep = ",";
            dcount += DF_REG_DEF_COUNT (i);
          }
        if (DF_REG_USE_COUNT (i))
          {
            fprintf (file, "%s%du", sep, DF_REG_USE_COUNT (i));
            sep = ",";
            ucount += DF_REG_USE_COUNT (i);
          }
        if (DF_REG_EQ_USE_COUNT (i))
          {
            fprintf (file, "%s%de", sep, DF_REG_EQ_USE_COUNT (i));
            ecount += DF_REG_EQ_USE_COUNT (i);
          }
        fprintf (file, "} ");
      }

  FOR_EACH_BB (bb)
    FOR_BB_INSNS (bb, insn)
      if (INSN_P (insn))
        {
          if (CALL_P (insn))
            ccount++;
          else
            icount++;
        }

  fprintf (file, "\n;; total ref usage %d{%dd,%du,%de}"
           " in %d{%d regular + %d call} insns.\n",
           dcount + ucount + ecount, dcount, ucount, ecount,
           icount + ccount, icount, ccount);
}
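
/* Editor's note: an illustrative (made-up) fragment of the output the
   code above produces:

     ;; ref usage  r0={2d,7u} r6={1d,3u,1e} r65={4d,4u}
     ;; total ref usage 22{7d,14u,1e} in 10{9 regular + 1 call} insns.

   where "d", "u" and "e" count defs, uses and eq_uses per register.  */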

/* Dump the bb_info for a given basic block.  */
static void
df_scan_start_block (basic_block bb, FILE *file)
{
  struct df_scan_bb_info *bb_info
    = df_scan_get_bb_info (bb->index);

  if (bb_info)
    {
      fprintf (file, ";; bb %d artificial_defs: ", bb->index);
      df_refs_chain_dump (bb_info->artificial_defs, true, file);
      fprintf (file, "\n;; bb %d artificial_uses: ", bb->index);
      df_refs_chain_dump (bb_info->artificial_uses, true, file);
      fprintf (file, "\n");
    }
#if 0
  {
    rtx insn;
    FOR_BB_INSNS (bb, insn)
      if (INSN_P (insn))
        df_insn_debug (insn, false, file);
  }
#endif
}

static struct df_problem problem_SCAN =
{
  DF_SCAN,                    /* Problem id.  */
  DF_NONE,                    /* Direction.  */
  df_scan_alloc,              /* Allocate the problem specific data.  */
  NULL,                       /* Reset global information.  */
  df_scan_free_bb_info,       /* Free basic block info.  */
  NULL,                       /* Local compute function.  */
  NULL,                       /* Init the solution specific data.  */
  NULL,                       /* Iterative solver.  */
  NULL,                       /* Confluence operator 0.  */
  NULL,                       /* Confluence operator n.  */
  NULL,                       /* Transfer function.  */
  NULL,                       /* Finalize function.  */
  df_scan_free,               /* Free all of the problem information.  */
  NULL,                       /* Remove this problem from the stack of dataflow problems.  */
  df_scan_start_dump,         /* Debugging.  */
  df_scan_start_block,        /* Debugging start block.  */
  NULL,                       /* Debugging end block.  */
  NULL,                       /* Incremental solution verify start.  */
  NULL,                       /* Incremental solution verify end.  */
  NULL,                       /* Dependent problem.  */
  sizeof (struct df_scan_bb_info),  /* Size of entry of block_info array.  */
  TV_DF_SCAN,                 /* Timing variable.  */
  false                       /* Reset blocks on dropping out of blocks_to_analyze.  */
};


/* Create a new DATAFLOW instance and add it to an existing instance
   of DF.  The returned structure is what is used to get at the
   solution.  */

void
df_scan_add_problem (void)
{
  df_add_problem (&problem_SCAN);
}
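
/* Editor's sketch of the initialization sequence (this mirrors how the
   df core brings the problem up; shown here for orientation only):

     df_scan_add_problem ();
     df_scan_alloc (NULL);
     df_scan_blocks ();

   after which the per-insn and per-block ref chains are populated.  */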

\f
/*----------------------------------------------------------------------------
   Storage Allocation Utilities
----------------------------------------------------------------------------*/


/* First, grow the reg_info information.  If the current size is less than
   the number of pseudos, grow to 25% more than the number of
   pseudos.

   Second, assure that all of the slots up to max_reg_num have been
   filled with reg_info structures.  */

void
df_grow_reg_info (void)
{
  unsigned int max_reg = max_reg_num ();
  unsigned int new_size = max_reg;
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  unsigned int i;

  if (df->regs_size < new_size)
    {
      new_size += new_size / 4;
      df->def_regs = XRESIZEVEC (struct df_reg_info *, df->def_regs, new_size);
      df->use_regs = XRESIZEVEC (struct df_reg_info *, df->use_regs, new_size);
      df->eq_use_regs = XRESIZEVEC (struct df_reg_info *, df->eq_use_regs,
                                    new_size);
      df->def_info.begin = XRESIZEVEC (unsigned, df->def_info.begin, new_size);
      df->def_info.count = XRESIZEVEC (unsigned, df->def_info.count, new_size);
      df->use_info.begin = XRESIZEVEC (unsigned, df->use_info.begin, new_size);
      df->use_info.count = XRESIZEVEC (unsigned, df->use_info.count, new_size);
      df->regs_size = new_size;
    }

  for (i = df->regs_inited; i < max_reg; i++)
    {
      struct df_reg_info *reg_info;

      reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
      memset (reg_info, 0, sizeof (struct df_reg_info));
      df->def_regs[i] = reg_info;
      reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
      memset (reg_info, 0, sizeof (struct df_reg_info));
      df->use_regs[i] = reg_info;
      reg_info = (struct df_reg_info *) pool_alloc (problem_data->reg_pool);
      memset (reg_info, 0, sizeof (struct df_reg_info));
      df->eq_use_regs[i] = reg_info;
      df->def_info.begin[i] = 0;
      df->def_info.count[i] = 0;
      df->use_info.begin[i] = 0;
      df->use_info.count[i] = 0;
    }

  df->regs_inited = max_reg;
}


/* Grow the ref information.  */

static void
df_grow_ref_info (struct df_ref_info *ref_info, unsigned int new_size)
{
  if (ref_info->refs_size < new_size)
    {
      ref_info->refs = XRESIZEVEC (df_ref, ref_info->refs, new_size);
      memset (ref_info->refs + ref_info->refs_size, 0,
              (new_size - ref_info->refs_size) * sizeof (df_ref));
      ref_info->refs_size = new_size;
    }
}


/* Check and grow the ref information if necessary.  This routine
   guarantees total_size + BITMAP_ADDEND amount of entries in refs
   array.  It updates ref_info->refs_size only and does not change
   ref_info->total_size.  */

static void
df_check_and_grow_ref_info (struct df_ref_info *ref_info,
                            unsigned bitmap_addend)
{
  if (ref_info->refs_size < ref_info->total_size + bitmap_addend)
    {
      int new_size = ref_info->total_size + bitmap_addend;
      new_size += ref_info->total_size / 4;
      df_grow_ref_info (ref_info, new_size);
    }
}

/* Grow the insn information.  If the current size is less than the
   number of instructions, grow to 25% more than the number of
   instructions.  */

void
df_grow_insn_info (void)
{
  unsigned int new_size = get_max_uid () + 1;
  if (DF_INSN_SIZE () < new_size)
    {
      new_size += new_size / 4;
      df->insns = XRESIZEVEC (struct df_insn_info *, df->insns, new_size);
      memset (df->insns + df->insns_size, 0,
              (new_size - DF_INSN_SIZE ()) * sizeof (struct df_insn_info *));
      DF_INSN_SIZE () = new_size;
    }
}



\f
/*----------------------------------------------------------------------------
   PUBLIC INTERFACES FOR SMALL GRAIN CHANGES TO SCANNING.
----------------------------------------------------------------------------*/

/* Rescan all of the blocks in blocks_to_analyze (as set by
   df_set_blocks), or all of the blocks in the function if
   blocks_to_analyze is NULL.  */

void
df_scan_blocks (void)
{
  basic_block bb;

  df->def_info.ref_order = DF_REF_ORDER_NO_TABLE;
  df->use_info.ref_order = DF_REF_ORDER_NO_TABLE;

  df_get_regular_block_artificial_uses (&df->regular_block_artificial_uses);
  df_get_eh_block_artificial_uses (&df->eh_block_artificial_uses);

  bitmap_ior_into (&df->eh_block_artificial_uses,
                   &df->regular_block_artificial_uses);

  /* ENTRY and EXIT blocks have special defs/uses.  */
  df_get_entry_block_def_set (df->entry_block_defs);
  df_record_entry_block_defs (df->entry_block_defs);
  df_get_exit_block_use_set (df->exit_block_uses);
  df_record_exit_block_uses (df->exit_block_uses);
  df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK));
  df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK));

  /* Regular blocks.  */
  FOR_EACH_BB (bb)
    {
      unsigned int bb_index = bb->index;
      df_bb_refs_record (bb_index, true);
    }
}


/* Create a new ref of type DF_REF_TYPE for register REG at address
   LOC within INSN of BB.  This function is only used externally.  */

df_ref
df_ref_create (rtx reg, rtx *loc, rtx insn,
               basic_block bb,
               enum df_ref_type ref_type,
               int ref_flags)
{
  df_ref ref;
  struct df_reg_info **reg_info;
  struct df_ref_info *ref_info;
  df_ref *ref_rec;
  df_ref **ref_rec_ptr;
  unsigned int count = 0;
  bool add_to_table;
  enum df_ref_class cl;

  df_grow_reg_info ();

  /* You cannot hack artificial refs.  */
  gcc_assert (insn);

  if (loc)
    cl = DF_REF_REGULAR;
  else
    cl = DF_REF_BASE;
  ref = df_ref_create_structure (cl, NULL, reg, loc, bb, DF_INSN_INFO_GET (insn),
                                 ref_type, ref_flags);

  if (DF_REF_REG_DEF_P (ref))
    {
      reg_info = df->def_regs;
      ref_info = &df->def_info;
      ref_rec_ptr = &DF_INSN_DEFS (insn);
      add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
    }
  else if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
    {
      reg_info = df->eq_use_regs;
      ref_info = &df->use_info;
      ref_rec_ptr = &DF_INSN_EQ_USES (insn);
      switch (ref_info->ref_order)
        {
        case DF_REF_ORDER_UNORDERED_WITH_NOTES:
        case DF_REF_ORDER_BY_REG_WITH_NOTES:
        case DF_REF_ORDER_BY_INSN_WITH_NOTES:
          add_to_table = true;
          break;
        default:
          add_to_table = false;
          break;
        }
    }
  else
    {
      reg_info = df->use_regs;
      ref_info = &df->use_info;
      ref_rec_ptr = &DF_INSN_USES (insn);
      add_to_table = ref_info->ref_order != DF_REF_ORDER_NO_TABLE;
    }

  /* Do not add if ref is not in the right blocks.  */
  if (add_to_table && df->analyze_subset)
    add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);

  df_install_ref (ref, reg_info[DF_REF_REGNO (ref)], ref_info, add_to_table);

  if (add_to_table)
    switch (ref_info->ref_order)
      {
      case DF_REF_ORDER_UNORDERED_WITH_NOTES:
      case DF_REF_ORDER_BY_REG_WITH_NOTES:
      case DF_REF_ORDER_BY_INSN_WITH_NOTES:
        ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
        break;
      default:
        ref_info->ref_order = DF_REF_ORDER_UNORDERED;
        break;
      }

  ref_rec = *ref_rec_ptr;
  while (*ref_rec)
    {
      count++;
      ref_rec++;
    }

  ref_rec = *ref_rec_ptr;
  if (count)
    {
      ref_rec = XRESIZEVEC (df_ref, ref_rec, count+2);
      *ref_rec_ptr = ref_rec;
      ref_rec[count] = ref;
      ref_rec[count+1] = NULL;
      qsort (ref_rec, count + 1, sizeof (df_ref), df_ref_compare);
    }
  else
    {
      df_ref *ref_rec = XNEWVEC (df_ref, 2);
      ref_rec[0] = ref;
      ref_rec[1] = NULL;
      *ref_rec_ptr = ref_rec;
    }

#if 0
  if (dump_file)
    {
      fprintf (dump_file, "adding ref ");
      df_ref_debug (ref, dump_file);
    }
#endif
  /* By adding the ref directly, df_insn_rescan may not find any
     differences even though the block will have changed.  So we need
     to mark the block dirty ourselves.  */
  if (!DEBUG_INSN_P (DF_REF_INSN (ref)))
    df_set_bb_dirty (bb);

  return ref;
}
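
/* Editor's sketch (hypothetical caller): manufacturing a use of REG at
   location LOC inside INSN might look like

     df_ref use = df_ref_create (reg, loc, insn, BLOCK_FOR_INSN (insn),
                                 DF_REF_REG_USE, 0);

   Passing LOC == NULL yields a DF_REF_BASE class ref rather than a
   DF_REF_REGULAR one, as the code above shows.  */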


\f
/*----------------------------------------------------------------------------
   UTILITIES TO CREATE AND DESTROY REFS AND CHAINS.
----------------------------------------------------------------------------*/

static void
df_free_ref (df_ref ref)
{
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;

  switch (DF_REF_CLASS (ref))
    {
    case DF_REF_BASE:
      pool_free (problem_data->ref_base_pool, ref);
      break;

    case DF_REF_ARTIFICIAL:
      pool_free (problem_data->ref_artificial_pool, ref);
      break;

    case DF_REF_REGULAR:
      pool_free (problem_data->ref_regular_pool, ref);
      break;
    }
}


/* Unlink and delete REF from the reg_use, reg_eq_use or reg_def chain.
   Also delete the def-use or use-def chain if it exists.  */

static void
df_reg_chain_unlink (df_ref ref)
{
  df_ref next = DF_REF_NEXT_REG (ref);
  df_ref prev = DF_REF_PREV_REG (ref);
  int id = DF_REF_ID (ref);
  struct df_reg_info *reg_info;
  df_ref *refs = NULL;

  if (DF_REF_REG_DEF_P (ref))
    {
      int regno = DF_REF_REGNO (ref);
      reg_info = DF_REG_DEF_GET (regno);
      refs = df->def_info.refs;
    }
  else
    {
      if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
        {
          reg_info = DF_REG_EQ_USE_GET (DF_REF_REGNO (ref));
          switch (df->use_info.ref_order)
            {
            case DF_REF_ORDER_UNORDERED_WITH_NOTES:
            case DF_REF_ORDER_BY_REG_WITH_NOTES:
            case DF_REF_ORDER_BY_INSN_WITH_NOTES:
              refs = df->use_info.refs;
              break;
            default:
              break;
            }
        }
      else
        {
          reg_info = DF_REG_USE_GET (DF_REF_REGNO (ref));
          refs = df->use_info.refs;
        }
    }

  if (refs)
    {
      if (df->analyze_subset)
        {
          if (bitmap_bit_p (df->blocks_to_analyze, DF_REF_BBNO (ref)))
            refs[id] = NULL;
        }
      else
        refs[id] = NULL;
    }

  /* Delete any def-use or use-def chains that start here.  It is
     possible that there is trash in this field.  This happens for
     insns that have been deleted when rescanning has been deferred
     and the chain problem has also been deleted.  The chain tear down
     code skips deleted insns.  */
  if (df_chain && DF_REF_CHAIN (ref))
    df_chain_unlink (ref);

  reg_info->n_refs--;
  if (DF_REF_FLAGS_IS_SET (ref, DF_HARD_REG_LIVE))
    {
      gcc_assert (DF_REF_REGNO (ref) < FIRST_PSEUDO_REGISTER);
      df->hard_regs_live_count[DF_REF_REGNO (ref)]--;
    }

  /* Unlink from the reg chain.  If there is no prev, this is the
     first of the list.  If not, just join the next and prev.  */
  if (prev)
    DF_REF_NEXT_REG (prev) = next;
  else
    {
      gcc_assert (reg_info->reg_chain == ref);
      reg_info->reg_chain = next;
    }
  if (next)
    DF_REF_PREV_REG (next) = prev;

  df_free_ref (ref);
}


/* Remove REF from VEC.  */

static void
df_ref_compress_rec (df_ref **vec_ptr, df_ref ref)
{
  df_ref *vec = *vec_ptr;

  if (vec[1])
    {
      while (*vec && *vec != ref)
        vec++;

      while (*vec)
        {
          *vec = *(vec+1);
          vec++;
        }
    }
  else
    {
      free (vec);
      *vec_ptr = df_null_ref_rec;
    }
}


/* Unlink REF from all def-use/use-def chains, etc.  */

void
df_ref_remove (df_ref ref)
{
#if 0
  if (dump_file)
    {
      fprintf (dump_file, "removing ref ");
      df_ref_debug (ref, dump_file);
    }
#endif

  if (DF_REF_REG_DEF_P (ref))
    {
      if (DF_REF_IS_ARTIFICIAL (ref))
        {
          struct df_scan_bb_info *bb_info
            = df_scan_get_bb_info (DF_REF_BBNO (ref));
          df_ref_compress_rec (&bb_info->artificial_defs, ref);
        }
      else
        {
          unsigned int uid = DF_REF_INSN_UID (ref);
          struct df_insn_info *insn_rec = DF_INSN_UID_GET (uid);
          df_ref_compress_rec (&insn_rec->defs, ref);
        }
    }
  else
    {
      if (DF_REF_IS_ARTIFICIAL (ref))
        {
          struct df_scan_bb_info *bb_info
            = df_scan_get_bb_info (DF_REF_BBNO (ref));
          df_ref_compress_rec (&bb_info->artificial_uses, ref);
        }
      else
        {
          unsigned int uid = DF_REF_INSN_UID (ref);
          struct df_insn_info *insn_rec = DF_INSN_UID_GET (uid);

          if (DF_REF_FLAGS (ref) & DF_REF_IN_NOTE)
            df_ref_compress_rec (&insn_rec->eq_uses, ref);
          else
            df_ref_compress_rec (&insn_rec->uses, ref);
        }
    }

  /* By deleting the ref directly, df_insn_rescan may not find any
     differences even though the block will have changed.  So we need
     to mark the block dirty ourselves.  */
  if (!DEBUG_INSN_P (DF_REF_INSN (ref)))
    df_set_bb_dirty (DF_REF_BB (ref));
  df_reg_chain_unlink (ref);
}
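
/* Editor's sketch (hypothetical): the inverse of the creation example
   above; deleting the first use of REGNO found on an insn's use list:

     df_ref *use_rec;
     for (use_rec = DF_INSN_USES (insn); *use_rec; use_rec++)
       if (DF_REF_REGNO (*use_rec) == regno)
         {
           df_ref_remove (*use_rec);
           break;
         }

   INSN and REGNO are assumed to be supplied by the caller.  */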


/* Create the insn record for INSN.  If there was one there, zero it
   out.  */

struct df_insn_info *
df_insn_create_insn_record (rtx insn)
{
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  struct df_insn_info *insn_rec;

  df_grow_insn_info ();
  insn_rec = DF_INSN_INFO_GET (insn);
  if (!insn_rec)
    {
      insn_rec = (struct df_insn_info *) pool_alloc (problem_data->insn_pool);
      DF_INSN_INFO_SET (insn, insn_rec);
    }
  memset (insn_rec, 0, sizeof (struct df_insn_info));
  insn_rec->insn = insn;
  return insn_rec;
}

/* Delete the du chains (DF_REF_CHAIN ()) of all refs in the ref chain.  */

static void
df_ref_chain_delete_du_chain (df_ref *ref_rec)
{
  while (*ref_rec)
    {
      df_ref ref = *ref_rec;
      /* CHAIN is allocated by DF_CHAIN.  So make sure to
         pass df_scan instance for the problem.  */
      if (DF_REF_CHAIN (ref))
        df_chain_unlink (ref);
      ref_rec++;
    }
}


/* Delete all refs in the ref chain.  */

static void
df_ref_chain_delete (df_ref *ref_rec)
{
  df_ref *start = ref_rec;
  while (*ref_rec)
    {
      df_reg_chain_unlink (*ref_rec);
      ref_rec++;
    }

  /* If the list is empty, it has a special shared element that is not
     to be deleted.  */
  if (*start)
    free (start);
}


/* Delete the hardreg chain.  */

static void
df_mw_hardreg_chain_delete (struct df_mw_hardreg **hardregs)
{
  struct df_scan_problem_data *problem_data;

  if (!hardregs)
    return;

  problem_data = (struct df_scan_problem_data *) df_scan->problem_data;

  while (*hardregs)
    {
      pool_free (problem_data->mw_reg_pool, *hardregs);
      hardregs++;
    }
}


/* Delete all of the refs information from INSN, marking BB dirty.
   BB may only be NULL when this is called from
   df_process_deferred_rescans.  */

void
df_insn_delete (basic_block bb, unsigned int uid)
{
  struct df_insn_info *insn_info = NULL;
  if (!df)
    return;

  df_grow_bb_info (df_scan);
  df_grow_reg_info ();

  /* The block must be marked as dirty now, rather than later as in
     df_insn_rescan and df_notes_rescan because it may not be there at
     rescanning time and the mark would blow up.  */
  if (bb)
    df_set_bb_dirty (bb);

  insn_info = DF_INSN_UID_SAFE_GET (uid);

  /* The client has deferred rescanning.  */
  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      if (insn_info)
        {
          bitmap_clear_bit (&df->insns_to_rescan, uid);
          bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
          bitmap_set_bit (&df->insns_to_delete, uid);
        }
      if (dump_file)
        fprintf (dump_file, "deferring deletion of insn with uid = %d.\n", uid);
      return;
    }

  if (dump_file)
    fprintf (dump_file, "deleting insn with uid = %d.\n", uid);

  bitmap_clear_bit (&df->insns_to_delete, uid);
  bitmap_clear_bit (&df->insns_to_rescan, uid);
  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
  if (insn_info)
    {
      struct df_scan_problem_data *problem_data
        = (struct df_scan_problem_data *) df_scan->problem_data;

      /* In general, notes do not have the insn_info fields
         initialized.  However, combine deletes insns by changing them
         to notes.  How clever.  So we cannot just check if it is a
         valid insn before short circuiting this code, we need to see
         if we actually initialized it.  */
      if (insn_info->defs)
        {
          df_mw_hardreg_chain_delete (insn_info->mw_hardregs);

          if (df_chain)
            {
              df_ref_chain_delete_du_chain (insn_info->defs);
              df_ref_chain_delete_du_chain (insn_info->uses);
              df_ref_chain_delete_du_chain (insn_info->eq_uses);
            }

          df_ref_chain_delete (insn_info->defs);
          df_ref_chain_delete (insn_info->uses);
          df_ref_chain_delete (insn_info->eq_uses);
        }
      pool_free (problem_data->insn_pool, insn_info);
      DF_INSN_UID_SET (uid, NULL);
    }
}
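
/* Editor's note: passes rarely call df_insn_delete directly; deletion
   normally reaches it through the rtl/cfg hooks.  A direct call
   (hypothetical) for an insn being destroyed outside those paths:

     df_insn_delete (BLOCK_FOR_INSN (insn), INSN_UID (insn));

   When DF_DEFER_INSN_RESCAN is in force this only queues the uid in
   df->insns_to_delete for df_process_deferred_rescans.  */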


/* Free all of the refs and the mw_hardregs in COLLECTION_REC.  */

static void
df_free_collection_rec (struct df_collection_rec *collection_rec)
{
  unsigned int ix;
  struct df_scan_problem_data *problem_data
    = (struct df_scan_problem_data *) df_scan->problem_data;
  df_ref ref;
  struct df_mw_hardreg *mw;

  FOR_EACH_VEC_ELT (df_ref, collection_rec->def_vec, ix, ref)
    df_free_ref (ref);
  FOR_EACH_VEC_ELT (df_ref, collection_rec->use_vec, ix, ref)
    df_free_ref (ref);
  FOR_EACH_VEC_ELT (df_ref, collection_rec->eq_use_vec, ix, ref)
    df_free_ref (ref);
  FOR_EACH_VEC_ELT (df_mw_hardreg_ptr, collection_rec->mw_vec, ix, mw)
    pool_free (problem_data->mw_reg_pool, mw);

  VEC_free (df_ref, stack, collection_rec->def_vec);
  VEC_free (df_ref, stack, collection_rec->use_vec);
  VEC_free (df_ref, stack, collection_rec->eq_use_vec);
  VEC_free (df_mw_hardreg_ptr, stack, collection_rec->mw_vec);
}

/* Rescan INSN.  Return TRUE if the rescanning produced any changes.  */

bool
df_insn_rescan (rtx insn)
{
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info = NULL;
  basic_block bb = BLOCK_FOR_INSN (insn);
  struct df_collection_rec collection_rec;

  if ((!df) || (!INSN_P (insn)))
    return false;

  if (!bb)
    {
      if (dump_file)
        fprintf (dump_file, "no bb for insn with uid = %d.\n", uid);
      return false;
    }

  /* The client has disabled rescanning and plans to do it itself.  */
  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    return false;

  df_grow_bb_info (df_scan);
  df_grow_reg_info ();

  insn_info = DF_INSN_UID_SAFE_GET (uid);

  /* The client has deferred rescanning.  */
  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      if (!insn_info)
        {
          insn_info = df_insn_create_insn_record (insn);
          insn_info->defs = df_null_ref_rec;
          insn_info->uses = df_null_ref_rec;
          insn_info->eq_uses = df_null_ref_rec;
          insn_info->mw_hardregs = df_null_mw_rec;
        }
      if (dump_file)
        fprintf (dump_file, "deferring rescan insn with uid = %d.\n", uid);

      bitmap_clear_bit (&df->insns_to_delete, uid);
      bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
      bitmap_set_bit (&df->insns_to_rescan, INSN_UID (insn));
      return false;
    }

  collection_rec.def_vec = VEC_alloc (df_ref, stack, 128);
  collection_rec.use_vec = VEC_alloc (df_ref, stack, 32);
  collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
  collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);

  bitmap_clear_bit (&df->insns_to_delete, uid);
  bitmap_clear_bit (&df->insns_to_rescan, uid);
  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
  if (insn_info)
    {
      int luid;
      bool the_same = df_insn_refs_verify (&collection_rec, bb, insn, false);
      /* If there's no change, return false.  */
      if (the_same)
        {
          df_free_collection_rec (&collection_rec);
          if (dump_file)
            fprintf (dump_file, "verify found no changes in insn with uid = %d.\n", uid);
          return false;
        }
      if (dump_file)
        fprintf (dump_file, "rescanning insn with uid = %d.\n", uid);

      /* There's change - we need to delete the existing info.
         Since the insn isn't moved, we can salvage its LUID.  */
      luid = DF_INSN_LUID (insn);
      df_insn_delete (NULL, uid);
      df_insn_create_insn_record (insn);
      DF_INSN_LUID (insn) = luid;
    }
  else
    {
      struct df_insn_info *insn_info = df_insn_create_insn_record (insn);
      df_insn_refs_collect (&collection_rec, bb, insn_info);
      if (dump_file)
        fprintf (dump_file, "scanning new insn with uid = %d.\n", uid);
    }

  df_refs_add_to_chains (&collection_rec, bb, insn);
  if (!DEBUG_INSN_P (insn))
    df_set_bb_dirty (bb);

  VEC_free (df_ref, stack, collection_rec.def_vec);
  VEC_free (df_ref, stack, collection_rec.use_vec);
  VEC_free (df_ref, stack, collection_rec.eq_use_vec);
  VEC_free (df_mw_hardreg_ptr, stack, collection_rec.mw_vec);

  return true;
}
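
/* Editor's sketch (hypothetical): after rewriting the pattern of INSN
   in place, a pass keeping df current would call

     changed = df_insn_rescan (insn);

   where CHANGED reports whether the ref chains differ from before.
   Under DF_DEFER_INSN_RESCAN the insn is queued instead and the
   function always returns false.  */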

/* Same as df_insn_rescan, but don't mark the basic block as
   dirty.  */

bool
df_insn_rescan_debug_internal (rtx insn)
{
  unsigned int uid = INSN_UID (insn);
  struct df_insn_info *insn_info;

  gcc_assert (DEBUG_INSN_P (insn)
              && VAR_LOC_UNKNOWN_P (INSN_VAR_LOCATION_LOC (insn)));

  if (!df)
    return false;

  insn_info = DF_INSN_UID_SAFE_GET (INSN_UID (insn));
  if (!insn_info)
    return false;

  if (dump_file)
    fprintf (dump_file, "deleting debug_insn with uid = %d.\n", uid);

  bitmap_clear_bit (&df->insns_to_delete, uid);
  bitmap_clear_bit (&df->insns_to_rescan, uid);
  bitmap_clear_bit (&df->insns_to_notes_rescan, uid);

  if (!insn_info->defs)
    return false;

  if (insn_info->defs == df_null_ref_rec
      && insn_info->uses == df_null_ref_rec
      && insn_info->eq_uses == df_null_ref_rec
      && insn_info->mw_hardregs == df_null_mw_rec)
    return false;

  df_mw_hardreg_chain_delete (insn_info->mw_hardregs);

  if (df_chain)
    {
      df_ref_chain_delete_du_chain (insn_info->defs);
      df_ref_chain_delete_du_chain (insn_info->uses);
      df_ref_chain_delete_du_chain (insn_info->eq_uses);
    }

  df_ref_chain_delete (insn_info->defs);
  df_ref_chain_delete (insn_info->uses);
  df_ref_chain_delete (insn_info->eq_uses);

  insn_info->defs = df_null_ref_rec;
  insn_info->uses = df_null_ref_rec;
  insn_info->eq_uses = df_null_ref_rec;
  insn_info->mw_hardregs = df_null_mw_rec;

  return true;
}


/* Rescan all of the insns in the function.  Note that the artificial
   uses and defs are not touched.  This function will destroy def-use
   or use-def chains.  */

void
df_insn_rescan_all (void)
{
  bool no_insn_rescan = false;
  bool defer_insn_rescan = false;
  basic_block bb;
  bitmap_iterator bi;
  unsigned int uid;
  bitmap_head tmp;

  bitmap_initialize (&tmp, &df_bitmap_obstack);

  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    {
      df_clear_flags (DF_NO_INSN_RESCAN);
      no_insn_rescan = true;
    }

  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      df_clear_flags (DF_DEFER_INSN_RESCAN);
      defer_insn_rescan = true;
    }

  bitmap_copy (&tmp, &df->insns_to_delete);
  EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
        df_insn_delete (NULL, uid);
    }

  bitmap_clear (&tmp);
  bitmap_clear (&df->insns_to_delete);
  bitmap_clear (&df->insns_to_rescan);
  bitmap_clear (&df->insns_to_notes_rescan);

  FOR_EACH_BB (bb)
    {
      rtx insn;
      FOR_BB_INSNS (bb, insn)
        {
          df_insn_rescan (insn);
        }
    }

  if (no_insn_rescan)
    df_set_flags (DF_NO_INSN_RESCAN);
  if (defer_insn_rescan)
    df_set_flags (DF_DEFER_INSN_RESCAN);
}


/* Process all of the deferred rescans or deletions.  */

void
df_process_deferred_rescans (void)
{
  bool no_insn_rescan = false;
  bool defer_insn_rescan = false;
  bitmap_iterator bi;
  unsigned int uid;
  bitmap_head tmp;

  bitmap_initialize (&tmp, &df_bitmap_obstack);

  if (df->changeable_flags & DF_NO_INSN_RESCAN)
    {
      df_clear_flags (DF_NO_INSN_RESCAN);
      no_insn_rescan = true;
    }

  if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
    {
      df_clear_flags (DF_DEFER_INSN_RESCAN);
      defer_insn_rescan = true;
    }

  if (dump_file)
    fprintf (dump_file, "starting the processing of deferred insns\n");

  bitmap_copy (&tmp, &df->insns_to_delete);
  EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
        df_insn_delete (NULL, uid);
    }

  bitmap_copy (&tmp, &df->insns_to_rescan);
  EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
        df_insn_rescan (insn_info->insn);
    }

  bitmap_copy (&tmp, &df->insns_to_notes_rescan);
  EXECUTE_IF_SET_IN_BITMAP (&tmp, 0, uid, bi)
    {
      struct df_insn_info *insn_info = DF_INSN_UID_SAFE_GET (uid);
      if (insn_info)
        df_notes_rescan (insn_info->insn);
    }

  if (dump_file)
    fprintf (dump_file, "ending the processing of deferred insns\n");

  bitmap_clear (&tmp);
  bitmap_clear (&df->insns_to_delete);
  bitmap_clear (&df->insns_to_rescan);
  bitmap_clear (&df->insns_to_notes_rescan);

  if (no_insn_rescan)
    df_set_flags (DF_NO_INSN_RESCAN);
  if (defer_insn_rescan)
    df_set_flags (DF_DEFER_INSN_RESCAN);

  /* If someone changed regs_ever_live during this pass, fix up the
     entry and exit blocks.  */
  if (df->redo_entry_and_exit)
    {
      df_update_entry_exit_and_calls ();
      df->redo_entry_and_exit = false;
    }
}
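
/* Editor's sketch of the deferral protocol (hypothetical pass body;
   the flags are the same ones tested above):

     df_set_flags (DF_DEFER_INSN_RESCAN);
     ... edit, delete and rescan many insns ...
     df_process_deferred_rescans ();
     df_clear_flags (DF_DEFER_INSN_RESCAN);
*/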


/* Count the number of refs.  Include the defs if INCLUDE_DEFS.  Include
   the uses if INCLUDE_USES.  Include the eq_uses if
   INCLUDE_EQ_USES.  */

static unsigned int
df_count_refs (bool include_defs, bool include_uses,
               bool include_eq_uses)
{
  unsigned int regno;
  int size = 0;
  unsigned int m = df->regs_inited;

  for (regno = 0; regno < m; regno++)
    {
      if (include_defs)
        size += DF_REG_DEF_COUNT (regno);
      if (include_uses)
        size += DF_REG_USE_COUNT (regno);
      if (include_eq_uses)
        size += DF_REG_EQ_USE_COUNT (regno);
    }
  return size;
}

/* Build the ref table for either the uses or defs from the reg-use
   or reg-def chains.  This version processes the refs in reg order,
   which is likely to be best if processing the whole function.  */

static void
df_reorganize_refs_by_reg_by_reg (struct df_ref_info *ref_info,
                                  bool include_defs,
                                  bool include_uses,
                                  bool include_eq_uses)
{
  unsigned int m = df->regs_inited;
  unsigned int regno;
  unsigned int offset = 0;
  unsigned int start;

  if (df->changeable_flags & DF_NO_HARD_REGS)
    {
      start = FIRST_PSEUDO_REGISTER;
      memset (ref_info->begin, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
      memset (ref_info->count, 0, sizeof (int) * FIRST_PSEUDO_REGISTER);
    }
  else
    start = 0;

  ref_info->total_size
    = df_count_refs (include_defs, include_uses, include_eq_uses);

  df_check_and_grow_ref_info (ref_info, 1);

  for (regno = start; regno < m; regno++)
    {
      int count = 0;
      ref_info->begin[regno] = offset;
      if (include_defs)
        {
          df_ref ref = DF_REG_DEF_CHAIN (regno);
          while (ref)
            {
              ref_info->refs[offset] = ref;
              DF_REF_ID (ref) = offset++;
              count++;
              ref = DF_REF_NEXT_REG (ref);
              gcc_checking_assert (offset < ref_info->refs_size);
            }
        }
      if (include_uses)
        {
          df_ref ref = DF_REG_USE_CHAIN (regno);
          while (ref)
            {
              ref_info->refs[offset] = ref;
              DF_REF_ID (ref) = offset++;
              count++;
              ref = DF_REF_NEXT_REG (ref);
              gcc_checking_assert (offset < ref_info->refs_size);
            }
        }
      if (include_eq_uses)
        {
          df_ref ref = DF_REG_EQ_USE_CHAIN (regno);
          while (ref)
            {
              ref_info->refs[offset] = ref;
              DF_REF_ID (ref) = offset++;
              count++;
              ref = DF_REF_NEXT_REG (ref);
              gcc_checking_assert (offset < ref_info->refs_size);
            }
        }
      ref_info->count[regno] = count;
    }

  /* The bitmap size is not decremented when refs are deleted.  So
     reset it now that we have squished out all of the empty
     slots.  */
  ref_info->table_size = offset;
}

/* Build the ref table for either the uses or defs from the reg-use
   or reg-def chains.  This version processes the refs in insn order,
   which is likely to be best if processing some segment of the
   function.  */

static void
df_reorganize_refs_by_reg_by_insn (struct df_ref_info *ref_info,
                                   bool include_defs,
                                   bool include_uses,
                                   bool include_eq_uses)
{
  bitmap_iterator bi;
  unsigned int bb_index;
  unsigned int m = df->regs_inited;
  unsigned int offset = 0;
  unsigned int r;
  unsigned int start
    = (df->changeable_flags & DF_NO_HARD_REGS) ? FIRST_PSEUDO_REGISTER : 0;

  memset (ref_info->begin, 0, sizeof (int) * df->regs_inited);
  memset (ref_info->count, 0, sizeof (int) * df->regs_inited);

  ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
  df_check_and_grow_ref_info (ref_info, 1);

  EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
    {
      basic_block bb = BASIC_BLOCK (bb_index);
      rtx insn;
      df_ref *ref_rec;

      if (include_defs)
        for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
          {
            unsigned int regno = DF_REF_REGNO (*ref_rec);
            ref_info->count[regno]++;
          }
      if (include_uses)
        for (ref_rec = df_get_artificial_uses (bb_index); *ref_rec; ref_rec++)
          {
            unsigned int regno = DF_REF_REGNO (*ref_rec);
            ref_info->count[regno]++;
          }

      FOR_BB_INSNS (bb, insn)
        {
          if (INSN_P (insn))
            {
              unsigned int uid = INSN_UID (insn);

              if (include_defs)
                for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
                  {
                    unsigned int regno = DF_REF_REGNO (*ref_rec);
                    ref_info->count[regno]++;
                  }
              if (include_uses)
                for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
                  {
                    unsigned int regno = DF_REF_REGNO (*ref_rec);
                    ref_info->count[regno]++;
                  }
              if (include_eq_uses)
                for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
                  {
                    unsigned int regno = DF_REF_REGNO (*ref_rec);
                    ref_info->count[regno]++;
                  }
            }
        }
    }

  for (r = start; r < m; r++)
    {
      ref_info->begin[r] = offset;
      offset += ref_info->count[r];
      ref_info->count[r] = 0;
    }

  EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, bb_index, bi)
    {
      basic_block bb = BASIC_BLOCK (bb_index);
      rtx insn;
      df_ref *ref_rec;

      if (include_defs)
        for (ref_rec = df_get_artificial_defs (bb_index); *ref_rec; ref_rec++)
          {
            df_ref ref = *ref_rec;
            unsigned int regno = DF_REF_REGNO (ref);
            if (regno >= start)
              {
                unsigned int id
                  = ref_info->begin[regno] + ref_info->count[regno]++;
                DF_REF_ID (ref) = id;
                ref_info->refs[id] = ref;
              }
          }
      if (include_uses)
        for (ref_rec = df_get_artificial_uses (bb_index); *ref_rec; ref_rec++)
          {
            df_ref ref = *ref_rec;
            unsigned int regno = DF_REF_REGNO (ref);
            if (regno >= start)
              {
                unsigned int id
                  = ref_info->begin[regno] + ref_info->count[regno]++;
                DF_REF_ID (ref) = id;
                ref_info->refs[id] = ref;
              }
          }

      FOR_BB_INSNS (bb, insn)
        {
          if (INSN_P (insn))
            {
              unsigned int uid = INSN_UID (insn);

              if (include_defs)
                for (ref_rec = DF_INSN_UID_DEFS (uid); *ref_rec; ref_rec++)
                  {
                    df_ref ref = *ref_rec;
                    unsigned int regno = DF_REF_REGNO (ref);
                    if (regno >= start)
                      {
                        unsigned int id
                          = ref_info->begin[regno] + ref_info->count[regno]++;
                        DF_REF_ID (ref) = id;
                        ref_info->refs[id] = ref;
                      }
                  }
              if (include_uses)
                for (ref_rec = DF_INSN_UID_USES (uid); *ref_rec; ref_rec++)
                  {
                    df_ref ref = *ref_rec;
                    unsigned int regno = DF_REF_REGNO (ref);
                    if (regno >= start)
                      {
                        unsigned int id
                          = ref_info->begin[regno] + ref_info->count[regno]++;
                        DF_REF_ID (ref) = id;
                        ref_info->refs[id] = ref;
                      }
                  }
              if (include_eq_uses)
                for (ref_rec = DF_INSN_UID_EQ_USES (uid); *ref_rec; ref_rec++)
                  {
                    df_ref ref = *ref_rec;
                    unsigned int regno = DF_REF_REGNO (ref);
                    if (regno >= start)
                      {
                        unsigned int id
                          = ref_info->begin[regno] + ref_info->count[regno]++;
                        DF_REF_ID (ref) = id;
                        ref_info->refs[id] = ref;
                      }
                  }
            }
        }
    }

  /* The bitmap size is not decremented when refs are deleted.  So
     reset it now that we have squished out all of the empty
     slots.  */

  ref_info->table_size = offset;
}

/* Build the ref table for either the uses or defs from the reg-use
   or reg-def chains.  */

static void
df_reorganize_refs_by_reg (struct df_ref_info *ref_info,
                           bool include_defs,
                           bool include_uses,
                           bool include_eq_uses)
{
  if (df->analyze_subset)
    df_reorganize_refs_by_reg_by_insn (ref_info, include_defs,
                                       include_uses, include_eq_uses);
  else
    df_reorganize_refs_by_reg_by_reg (ref_info, include_defs,
                                      include_uses, include_eq_uses);
}


/* Add the refs in REF_VEC to the table in REF_INFO starting at OFFSET.  */
static unsigned int
df_add_refs_to_table (unsigned int offset,
                      struct df_ref_info *ref_info,
                      df_ref *ref_vec)
{
  while (*ref_vec)
    {
      df_ref ref = *ref_vec;
      if ((!(df->changeable_flags & DF_NO_HARD_REGS))
          || (DF_REF_REGNO (ref) >= FIRST_PSEUDO_REGISTER))
        {
          ref_info->refs[offset] = ref;
          DF_REF_ID (*ref_vec) = offset++;
        }
      ref_vec++;
    }
  return offset;
}


/* Add the refs in all of the insns of BB to the table in REF_INFO,
   starting at OFFSET.  Include the defs if INCLUDE_DEFS.  Include the
   uses if INCLUDE_USES.  Include the eq_uses if INCLUDE_EQ_USES.  */

static unsigned int
df_reorganize_refs_by_insn_bb (basic_block bb, unsigned int offset,
                               struct df_ref_info *ref_info,
                               bool include_defs, bool include_uses,
                               bool include_eq_uses)
{
  rtx insn;

  if (include_defs)
    offset = df_add_refs_to_table (offset, ref_info,
                                   df_get_artificial_defs (bb->index));
  if (include_uses)
    offset = df_add_refs_to_table (offset, ref_info,
                                   df_get_artificial_uses (bb->index));

  FOR_BB_INSNS (bb, insn)
    if (INSN_P (insn))
      {
        unsigned int uid = INSN_UID (insn);
        if (include_defs)
          offset = df_add_refs_to_table (offset, ref_info,
                                         DF_INSN_UID_DEFS (uid));
        if (include_uses)
          offset = df_add_refs_to_table (offset, ref_info,
                                         DF_INSN_UID_USES (uid));
        if (include_eq_uses)
          offset = df_add_refs_to_table (offset, ref_info,
                                         DF_INSN_UID_EQ_USES (uid));
      }
  return offset;
}


/* Organize the refs by insn into the table in REF_INFO.  If
   blocks_to_analyze is defined, use that set, otherwise the entire
   program.  Include the defs if INCLUDE_DEFS.  Include the uses if
   INCLUDE_USES.  Include the eq_uses if INCLUDE_EQ_USES.  */

static void
df_reorganize_refs_by_insn (struct df_ref_info *ref_info,
                            bool include_defs, bool include_uses,
                            bool include_eq_uses)
{
  basic_block bb;
  unsigned int offset = 0;

  ref_info->total_size = df_count_refs (include_defs, include_uses, include_eq_uses);
  df_check_and_grow_ref_info (ref_info, 1);
  if (df->blocks_to_analyze)
    {
      bitmap_iterator bi;
      unsigned int index;

      EXECUTE_IF_SET_IN_BITMAP (df->blocks_to_analyze, 0, index, bi)
        {
          offset = df_reorganize_refs_by_insn_bb (BASIC_BLOCK (index), offset, ref_info,
                                                  include_defs, include_uses,
                                                  include_eq_uses);
        }

      ref_info->table_size = offset;
    }
  else
    {
      FOR_ALL_BB (bb)
        offset = df_reorganize_refs_by_insn_bb (bb, offset, ref_info,
                                                include_defs, include_uses,
                                                include_eq_uses);
      ref_info->table_size = offset;
    }
}


/* If the use refs in DF are not organized, reorganize them.  */

void
df_maybe_reorganize_use_refs (enum df_ref_order order)
{
  if (order == df->use_info.ref_order)
    return;

  switch (order)
    {
    case DF_REF_ORDER_BY_REG:
      df_reorganize_refs_by_reg (&df->use_info, false, true, false);
      break;

    case DF_REF_ORDER_BY_REG_WITH_NOTES:
      df_reorganize_refs_by_reg (&df->use_info, false, true, true);
      break;

    case DF_REF_ORDER_BY_INSN:
      df_reorganize_refs_by_insn (&df->use_info, false, true, false);
      break;

    case DF_REF_ORDER_BY_INSN_WITH_NOTES:
      df_reorganize_refs_by_insn (&df->use_info, false, true, true);
      break;

    case DF_REF_ORDER_NO_TABLE:
      free (df->use_info.refs);
      df->use_info.refs = NULL;
      df->use_info.refs_size = 0;
      break;

    case DF_REF_ORDER_UNORDERED:
    case DF_REF_ORDER_UNORDERED_WITH_NOTES:
      gcc_unreachable ();
      break;
    }

  df->use_info.ref_order = order;
}
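
/* Editor's sketch (hypothetical): a client that wants to walk all of
   the uses through the flat table would first request an ordering:

     unsigned int i;
     df_maybe_reorganize_use_refs (DF_REF_ORDER_BY_INSN);
     for (i = 0; i < DF_USES_TABLE_SIZE (); i++)
       if (DF_USES_GET (i))
         ... examine the use ...
*/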


/* If the def refs in DF are not organized, reorganize them.  */

void
df_maybe_reorganize_def_refs (enum df_ref_order order)
{
  if (order == df->def_info.ref_order)
    return;

  switch (order)
    {
    case DF_REF_ORDER_BY_REG:
      df_reorganize_refs_by_reg (&df->def_info, true, false, false);
      break;

    case DF_REF_ORDER_BY_INSN:
      df_reorganize_refs_by_insn (&df->def_info, true, false, false);
      break;

    case DF_REF_ORDER_NO_TABLE:
      free (df->def_info.refs);
      df->def_info.refs = NULL;
      df->def_info.refs_size = 0;
      break;

    case DF_REF_ORDER_BY_INSN_WITH_NOTES:
    case DF_REF_ORDER_BY_REG_WITH_NOTES:
    case DF_REF_ORDER_UNORDERED:
    case DF_REF_ORDER_UNORDERED_WITH_NOTES:
      gcc_unreachable ();
      break;
    }

  df->def_info.ref_order = order;
}
1930
1931
1932 /* Change all of the basic block references in INSN to use the insn's
1933 current basic block. This function is called from routines that move
1934 instructions from one block to another. */
1935
1936 void
1937 df_insn_change_bb (rtx insn, basic_block new_bb)
1938 {
1939 basic_block old_bb = BLOCK_FOR_INSN (insn);
1940 struct df_insn_info *insn_info;
1941 unsigned int uid = INSN_UID (insn);
1942
1943 if (old_bb == new_bb)
1944 return;
1945
1946 set_block_for_insn (insn, new_bb);
1947
1948 if (!df)
1949 return;
1950
1951 if (dump_file)
1952 fprintf (dump_file, "changing bb of uid %d\n", uid);
1953
1954 insn_info = DF_INSN_UID_SAFE_GET (uid);
1955 if (insn_info == NULL)
1956 {
1957 if (dump_file)
1958 fprintf (dump_file, " unscanned insn\n");
1959 df_insn_rescan (insn);
1960 return;
1961 }
1962
1963 if (!INSN_P (insn))
1964 return;
1965
1966 df_set_bb_dirty (new_bb);
1967 if (old_bb)
1968 {
1969 if (dump_file)
1970 fprintf (dump_file, " from %d to %d\n",
1971 old_bb->index, new_bb->index);
1972 df_set_bb_dirty (old_bb);
1973 }
1974 else
1975 if (dump_file)
1976 fprintf (dump_file, " to %d\n", new_bb->index);
1977 }
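
/* A minimal sketch of a caller (illustrative; this mirrors what
   reorder_insns in emit-rtl.c does): code that relinks INSN after
   AFTER, an insn already inside NEW_BB, and then lets df catch up:

     reorder_insns_nobb (insn, insn, after);
     df_insn_change_bb (insn, new_bb);

   reorder_insns_nobb moves the insn without touching BLOCK_FOR_INSN;
   df_insn_change_bb then fixes the block pointer and marks both the
   old and new blocks dirty.  */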
1978
1979
1980 /* Helper function for df_ref_change_reg_with_loc. */
1981
1982 static void
1983 df_ref_change_reg_with_loc_1 (struct df_reg_info *old_df,
1984 struct df_reg_info *new_df,
1985 int new_regno, rtx loc)
1986 {
1987 df_ref the_ref = old_df->reg_chain;
1988
1989 while (the_ref)
1990 {
1991 if ((!DF_REF_IS_ARTIFICIAL (the_ref))
1992 && DF_REF_LOC (the_ref)
1993 && (*DF_REF_LOC (the_ref) == loc))
1994 {
1995 df_ref next_ref = DF_REF_NEXT_REG (the_ref);
1996 df_ref prev_ref = DF_REF_PREV_REG (the_ref);
1997 df_ref *ref_vec, *ref_vec_t;
1998 struct df_insn_info *insn_info = DF_REF_INSN_INFO (the_ref);
1999 unsigned int count = 0;
2000
2001 DF_REF_REGNO (the_ref) = new_regno;
2002 DF_REF_REG (the_ref) = regno_reg_rtx[new_regno];
2003
2004 /* Pull the_ref out of the old regno chain. */
2005 if (prev_ref)
2006 DF_REF_NEXT_REG (prev_ref) = next_ref;
2007 else
2008 old_df->reg_chain = next_ref;
2009 if (next_ref)
2010 DF_REF_PREV_REG (next_ref) = prev_ref;
2011 old_df->n_refs--;
2012
2013 /* Put the ref into the new regno chain. */
2014 DF_REF_PREV_REG (the_ref) = NULL;
2015 DF_REF_NEXT_REG (the_ref) = new_df->reg_chain;
2016 if (new_df->reg_chain)
2017 DF_REF_PREV_REG (new_df->reg_chain) = the_ref;
2018 new_df->reg_chain = the_ref;
2019 new_df->n_refs++;
2020 if (DF_REF_BB (the_ref))
2021 df_set_bb_dirty (DF_REF_BB (the_ref));
2022
2023 /* The record that the ref was in needs to be re-sorted because
2024 the regno is a sorting key.  First, find the right
2025 record. */
2026 if (DF_REF_FLAGS (the_ref) & DF_REF_IN_NOTE)
2027 ref_vec = insn_info->eq_uses;
2028 else
2029 ref_vec = insn_info->uses;
2030 if (dump_file)
2031 fprintf (dump_file, "changing reg in insn %d\n",
2032 DF_REF_INSN_UID (the_ref));
2033
2034 ref_vec_t = ref_vec;
2035
2036 /* Find the length. */
2037 while (*ref_vec_t)
2038 {
2039 count++;
2040 ref_vec_t++;
2041 }
2042 qsort (ref_vec, count, sizeof (df_ref ), df_ref_compare);
2043
2044 the_ref = next_ref;
2045 }
2046 else
2047 the_ref = DF_REF_NEXT_REG (the_ref);
2048 }
2049 }
2050
2051
2052 /* Change the regno of all refs that contained LOC from OLD_REGNO to
2053 NEW_REGNO.  Refs that do not match LOC are not changed, which means
2054 that artificial refs are never changed, since they have no loc.  This
2055 call is to support the SET_REGNO macro. */
2056
2057 void
2058 df_ref_change_reg_with_loc (int old_regno, int new_regno, rtx loc)
2059 {
2060 if ((!df) || (old_regno == -1) || (old_regno == new_regno))
2061 return;
2062
2063 df_grow_reg_info ();
2064
2065 df_ref_change_reg_with_loc_1 (DF_REG_DEF_GET (old_regno),
2066 DF_REG_DEF_GET (new_regno), new_regno, loc);
2067 df_ref_change_reg_with_loc_1 (DF_REG_USE_GET (old_regno),
2068 DF_REG_USE_GET (new_regno), new_regno, loc);
2069 df_ref_change_reg_with_loc_1 (DF_REG_EQ_USE_GET (old_regno),
2070 DF_REG_EQ_USE_GET (new_regno), new_regno, loc);
2071 }
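
/* For reference, SET_REGNO in rtl.h expands to approximately

     #define SET_REGNO(RTX,N) \
       (df_ref_change_reg_with_loc (REGNO (RTX), N, RTX), \
        XCUINT (RTX, 0, REG) = N)

   so the refs hanging off the old regno are rewired before the REG
   rtx itself receives its new register number.  */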
2072
2073
2074 /* Delete the mw_hardregs that point into the eq_notes. */
2075
2076 static unsigned int
2077 df_mw_hardreg_chain_delete_eq_uses (struct df_insn_info *insn_info)
2078 {
2079 struct df_mw_hardreg **mw_vec = insn_info->mw_hardregs;
2080 unsigned int deleted = 0;
2081 unsigned int count = 0;
2082 struct df_scan_problem_data *problem_data
2083 = (struct df_scan_problem_data *) df_scan->problem_data;
2084
2085 if (!*mw_vec)
2086 return 0;
2087
2088 while (*mw_vec)
2089 {
2090 if ((*mw_vec)->flags & DF_REF_IN_NOTE)
2091 {
2092 struct df_mw_hardreg **temp_vec = mw_vec;
2093
2094 pool_free (problem_data->mw_reg_pool, *mw_vec);
2096 /* Shove the remaining ones down one to fill the gap.  While
2097 this looks O(n^2), it is highly unusual to have any mw regs
2098 in eq_notes and the chances of more than one are almost
2099 nonexistent. */
2100 while (*temp_vec)
2101 {
2102 *temp_vec = *(temp_vec + 1);
2103 temp_vec++;
2104 }
2105 deleted++;
2106 }
2107 else
2108 {
2109 mw_vec++;
2110 count++;
2111 }
2112 }
2113
2114 if (count == 0)
2115 {
2116 df_scan_free_mws_vec (insn_info->mw_hardregs);
2117 insn_info->mw_hardregs = df_null_mw_rec;
2118 return 0;
2119 }
2120 return deleted;
2121 }
2122
2123
2124 /* Rescan only the REG_EQUIV/REG_EQUAL notes part of INSN. */
2125
2126 void
2127 df_notes_rescan (rtx insn)
2128 {
2129 struct df_insn_info *insn_info;
2130 unsigned int uid = INSN_UID (insn);
2131
2132 if (!df)
2133 return;
2134
2135 /* The client has disabled rescanning and plans to do it itself. */
2136 if (df->changeable_flags & DF_NO_INSN_RESCAN)
2137 return;
2138
2139 /* Do nothing if the insn hasn't been emitted yet. */
2140 if (!BLOCK_FOR_INSN (insn))
2141 return;
2142
2143 df_grow_bb_info (df_scan);
2144 df_grow_reg_info ();
2145
2146 insn_info = DF_INSN_UID_SAFE_GET (uid);
2147
2148 /* The client has deferred rescanning. */
2149 if (df->changeable_flags & DF_DEFER_INSN_RESCAN)
2150 {
2151 if (!insn_info)
2152 {
2153 insn_info = df_insn_create_insn_record (insn);
2154 insn_info->defs = df_null_ref_rec;
2155 insn_info->uses = df_null_ref_rec;
2156 insn_info->eq_uses = df_null_ref_rec;
2157 insn_info->mw_hardregs = df_null_mw_rec;
2158 }
2159
2160 bitmap_clear_bit (&df->insns_to_delete, uid);
2161 /* If the insn is set to be rescanned, it does not need to also
2162 have its notes rescanned. */
2163 if (!bitmap_bit_p (&df->insns_to_rescan, uid))
2164 bitmap_set_bit (&df->insns_to_notes_rescan, uid);
2165 return;
2166 }
2167
2168 bitmap_clear_bit (&df->insns_to_delete, uid);
2169 bitmap_clear_bit (&df->insns_to_notes_rescan, uid);
2170
2171 if (insn_info)
2172 {
2173 basic_block bb = BLOCK_FOR_INSN (insn);
2174 rtx note;
2175 struct df_collection_rec collection_rec;
2176 unsigned int num_deleted;
2177 unsigned int mw_len;
2178
2179 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
2180 collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
2181 collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);
2182
2183 num_deleted = df_mw_hardreg_chain_delete_eq_uses (insn_info);
2184 df_ref_chain_delete (insn_info->eq_uses);
2185 insn_info->eq_uses = NULL;
2186
2187 /* Process REG_EQUIV/REG_EQUAL notes. */
2188 for (note = REG_NOTES (insn); note;
2189 note = XEXP (note, 1))
2190 {
2191 switch (REG_NOTE_KIND (note))
2192 {
2193 case REG_EQUIV:
2194 case REG_EQUAL:
2195 df_uses_record (&collection_rec,
2196 &XEXP (note, 0), DF_REF_REG_USE,
2197 bb, insn_info, DF_REF_IN_NOTE);
2198 default:
2199 break;
2200 }
2201 }
2202
2203 /* Find some place to put any new mw_hardregs. */
2204 df_canonize_collection_rec (&collection_rec);
2205 mw_len = VEC_length (df_mw_hardreg_ptr, collection_rec.mw_vec);
2206 if (mw_len)
2207 {
2208 unsigned int count = 0;
2209 struct df_mw_hardreg **mw_rec = insn_info->mw_hardregs;
2210 while (*mw_rec)
2211 {
2212 count++;
2213 mw_rec++;
2214 }
2215
2216 if (count)
2217 {
2218 /* Append to the end of the existing record after
2219 expanding it if necessary. */
2220 if (mw_len > num_deleted)
2221 {
2222 insn_info->mw_hardregs =
2223 XRESIZEVEC (struct df_mw_hardreg *,
2224 insn_info->mw_hardregs,
2225 count + 1 + mw_len);
2226 }
2227 memcpy (&insn_info->mw_hardregs[count],
2228 VEC_address (df_mw_hardreg_ptr, collection_rec.mw_vec),
2229 mw_len * sizeof (struct df_mw_hardreg *));
2230 insn_info->mw_hardregs[count + mw_len] = NULL;
2231 qsort (insn_info->mw_hardregs, count + mw_len,
2232 sizeof (struct df_mw_hardreg *), df_mw_compare);
2233 }
2234 else
2235 {
2236 /* No vector there. */
2237 insn_info->mw_hardregs
2238 = XNEWVEC (struct df_mw_hardreg*, 1 + mw_len);
2239 memcpy (insn_info->mw_hardregs,
2240 VEC_address (df_mw_hardreg_ptr, collection_rec.mw_vec),
2241 mw_len * sizeof (struct df_mw_hardreg *));
2242 insn_info->mw_hardregs[mw_len] = NULL;
2243 }
2244 }
2245 /* Get rid of the mw_rec so that df_refs_add_to_chains will
2246 ignore it. */
2247 VEC_free (df_mw_hardreg_ptr, stack, collection_rec.mw_vec);
2248 df_refs_add_to_chains (&collection_rec, bb, insn);
2249 VEC_free (df_ref, stack, collection_rec.eq_use_vec);
2250 }
2251 else
2252 df_insn_rescan (insn);
2253
2254 }
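
/* A sketch of when this is needed (illustrative): a pass that
   attaches a REG_EQUAL note by hand, e.g.

     add_reg_note (insn, REG_EQUAL, copy_rtx (src));
     df_notes_rescan (insn);

   must call df_notes_rescan so the eq_uses stay current.  Whether
   the rescan happens immediately or is queued is decided by the
   DF_DEFER_INSN_RESCAN logic above.  */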
2255
2256 \f
2257 /*----------------------------------------------------------------------------
2258 Hard core instruction scanning code. No external interfaces here,
2259 just a lot of routines that look inside insns.
2260 ----------------------------------------------------------------------------*/
2261
2262
2263 /* Return true if the contents of two df_ref's are identical.
2264 It ignores DF_REF_REG_MARKER and DF_REF_MW_HARDREG. */
2265
2266 static bool
2267 df_ref_equal_p (df_ref ref1, df_ref ref2)
2268 {
2269 if (!ref2)
2270 return false;
2271
2272 if (ref1 == ref2)
2273 return true;
2274
2275 if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2)
2276 || DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2)
2277 || DF_REF_REG (ref1) != DF_REF_REG (ref2)
2278 || DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2)
2279 || ((DF_REF_FLAGS (ref1) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG))
2280 != (DF_REF_FLAGS (ref2) & ~(DF_REF_REG_MARKER + DF_REF_MW_HARDREG)))
2281 || DF_REF_BB (ref1) != DF_REF_BB (ref2)
2282 || DF_REF_INSN_INFO (ref1) != DF_REF_INSN_INFO (ref2))
2283 return false;
2284
2285 switch (DF_REF_CLASS (ref1))
2286 {
2287 case DF_REF_ARTIFICIAL:
2288 case DF_REF_BASE:
2289 return true;
2290
2291 case DF_REF_REGULAR:
2292 return DF_REF_LOC (ref1) == DF_REF_LOC (ref2);
2293
2294 default:
2295 gcc_unreachable ();
2296 }
2297 return false;
2298 }
2299
2300
2301 /* Compare REF1 and REF2 for sorting. This is only called from places
2302 where all of the refs are of the same type, in the same insn, and
2303 have the same bb. So these fields are not checked. */
2304
2305 static int
2306 df_ref_compare (const void *r1, const void *r2)
2307 {
2308 const df_ref ref1 = *(const df_ref *)r1;
2309 const df_ref ref2 = *(const df_ref *)r2;
2310
2311 if (ref1 == ref2)
2312 return 0;
2313
2314 if (DF_REF_CLASS (ref1) != DF_REF_CLASS (ref2))
2315 return (int)DF_REF_CLASS (ref1) - (int)DF_REF_CLASS (ref2);
2316
2317 if (DF_REF_REGNO (ref1) != DF_REF_REGNO (ref2))
2318 return (int)DF_REF_REGNO (ref1) - (int)DF_REF_REGNO (ref2);
2319
2320 if (DF_REF_TYPE (ref1) != DF_REF_TYPE (ref2))
2321 return (int)DF_REF_TYPE (ref1) - (int)DF_REF_TYPE (ref2);
2322
2323 if (DF_REF_REG (ref1) != DF_REF_REG (ref2))
2324 return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
2325
2326 /* Cannot look at the LOC field on artificial refs. */
2327 if (DF_REF_CLASS (ref1) != DF_REF_ARTIFICIAL
2328 && DF_REF_LOC (ref1) != DF_REF_LOC (ref2))
2329 return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
2330
2331 if (DF_REF_FLAGS (ref1) != DF_REF_FLAGS (ref2))
2332 {
2333 /* If two refs are identical except that one of them is from
2334 a mw and one is not, we need to have the one with the mw
2335 first. */
2336 if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG) ==
2337 DF_REF_FLAGS_IS_SET (ref2, DF_REF_MW_HARDREG))
2338 return DF_REF_FLAGS (ref1) - DF_REF_FLAGS (ref2);
2339 else if (DF_REF_FLAGS_IS_SET (ref1, DF_REF_MW_HARDREG))
2340 return -1;
2341 else
2342 return 1;
2343 }
2344
2345 return (int)DF_REF_ORDER (ref1) - (int)DF_REF_ORDER (ref2);
2346 }
2347
2348 static void
2349 df_swap_refs (VEC(df_ref,stack) **ref_vec, int i, int j)
2350 {
2351 df_ref tmp = VEC_index (df_ref, *ref_vec, i);
2352 VEC_replace (df_ref, *ref_vec, i, VEC_index (df_ref, *ref_vec, j));
2353 VEC_replace (df_ref, *ref_vec, j, tmp);
2354 }
2355
2356 /* Sort and compress a set of refs. */
2357
2358 static void
2359 df_sort_and_compress_refs (VEC(df_ref,stack) **ref_vec)
2360 {
2361 unsigned int count;
2362 unsigned int i;
2363 unsigned int dist = 0;
2364
2365 count = VEC_length (df_ref, *ref_vec);
2366
2367 /* If there are 1 or 0 elements, there is nothing to do. */
2368 if (count < 2)
2369 return;
2370 else if (count == 2)
2371 {
2372 df_ref r0 = VEC_index (df_ref, *ref_vec, 0);
2373 df_ref r1 = VEC_index (df_ref, *ref_vec, 1);
2374 if (df_ref_compare (&r0, &r1) > 0)
2375 df_swap_refs (ref_vec, 0, 1);
2376 }
2377 else
2378 {
2379 for (i = 0; i < count - 1; i++)
2380 {
2381 df_ref r0 = VEC_index (df_ref, *ref_vec, i);
2382 df_ref r1 = VEC_index (df_ref, *ref_vec, i + 1);
2383 if (df_ref_compare (&r0, &r1) >= 0)
2384 break;
2385 }
2386 /* If the array is already strictly ordered,
2387 which is the most common case when COUNT is large
2388 (as happens for CALL insns),
2389 there is no need to sort and filter out duplicates.
2390 Simply return.
2391 Make sure DF_GET_ADD_REFS adds refs in the increasing order
2392 of DF_REF_COMPARE. */
2393 if (i == count - 1)
2394 return;
2395 VEC_qsort (df_ref, *ref_vec, df_ref_compare);
2396 }
2397
2398 for (i=0; i<count-dist; i++)
2399 {
2400 /* Find the next ref that is not equal to the current ref. */
2401 while (i + dist + 1 < count
2402 && df_ref_equal_p (VEC_index (df_ref, *ref_vec, i),
2403 VEC_index (df_ref, *ref_vec, i + dist + 1)))
2404 {
2405 df_free_ref (VEC_index (df_ref, *ref_vec, i + dist + 1));
2406 dist++;
2407 }
2408 /* Copy it down to the next position. */
2409 if (dist && i + dist + 1 < count)
2410 VEC_replace (df_ref, *ref_vec, i + 1,
2411 VEC_index (df_ref, *ref_vec, i + dist + 1));
2412 }
2413
2414 count -= dist;
2415 VEC_truncate (df_ref, *ref_vec, count);
2416 }
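
/* A worked example of the compression loop above: given the sorted
   vector [r0, r0', r1] where r0 and r0' satisfy df_ref_equal_p, the
   inner while frees r0' and bumps DIST to 1, the copy step moves r1
   down into slot 1, and the final truncate leaves [r0, r1].  */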
2417
2418
2419 /* Return true if the contents of two df_mw_hardregs are
2420 identical. */
2421
2422 static bool
2423 df_mw_equal_p (struct df_mw_hardreg *mw1, struct df_mw_hardreg *mw2)
2424 {
2425 if (!mw2)
2426 return false;
2427 return (mw1 == mw2) ||
2428 (mw1->mw_reg == mw2->mw_reg
2429 && mw1->type == mw2->type
2430 && mw1->flags == mw2->flags
2431 && mw1->start_regno == mw2->start_regno
2432 && mw1->end_regno == mw2->end_regno);
2433 }
2434
2435
2436 /* Compare MW1 and MW2 for sorting. */
2437
2438 static int
2439 df_mw_compare (const void *m1, const void *m2)
2440 {
2441 const struct df_mw_hardreg *const mw1 = *(const struct df_mw_hardreg *const*)m1;
2442 const struct df_mw_hardreg *const mw2 = *(const struct df_mw_hardreg *const*)m2;
2443
2444 if (mw1 == mw2)
2445 return 0;
2446
2447 if (mw1->type != mw2->type)
2448 return mw1->type - mw2->type;
2449
2450 if (mw1->flags != mw2->flags)
2451 return mw1->flags - mw2->flags;
2452
2453 if (mw1->start_regno != mw2->start_regno)
2454 return mw1->start_regno - mw2->start_regno;
2455
2456 if (mw1->end_regno != mw2->end_regno)
2457 return mw1->end_regno - mw2->end_regno;
2458
2459 if (mw1->mw_reg != mw2->mw_reg)
2460 return mw1->mw_order - mw2->mw_order;
2461
2462 return 0;
2463 }
2464
2465
2466 /* Sort and compress a set of mw_hardregs. */
2467
2468 static void
2469 df_sort_and_compress_mws (VEC(df_mw_hardreg_ptr,stack) **mw_vec)
2470 {
2471 unsigned int count;
2472 struct df_scan_problem_data *problem_data
2473 = (struct df_scan_problem_data *) df_scan->problem_data;
2474 unsigned int i;
2475 unsigned int dist = 0;
2476
2477 count = VEC_length (df_mw_hardreg_ptr, *mw_vec);
2478 if (count < 2)
2479 return;
2480 else if (count == 2)
2481 {
2482 struct df_mw_hardreg *m0 = VEC_index (df_mw_hardreg_ptr, *mw_vec, 0);
2483 struct df_mw_hardreg *m1 = VEC_index (df_mw_hardreg_ptr, *mw_vec, 1);
2484 if (df_mw_compare (&m0, &m1) > 0)
2485 {
2486 struct df_mw_hardreg *tmp = VEC_index (df_mw_hardreg_ptr,
2487 *mw_vec, 0);
2488 VEC_replace (df_mw_hardreg_ptr, *mw_vec, 0,
2489 VEC_index (df_mw_hardreg_ptr, *mw_vec, 1));
2490 VEC_replace (df_mw_hardreg_ptr, *mw_vec, 1, tmp);
2491 }
2492 }
2493 else
2494 VEC_qsort (df_mw_hardreg_ptr, *mw_vec, df_mw_compare);
2495
2496 for (i=0; i<count-dist; i++)
2497 {
2498 /* Find the next ref that is not equal to the current ref. */
2499 while (i + dist + 1 < count
2500 && df_mw_equal_p (VEC_index (df_mw_hardreg_ptr, *mw_vec, i),
2501 VEC_index (df_mw_hardreg_ptr, *mw_vec,
2502 i + dist + 1)))
2503 {
2504 pool_free (problem_data->mw_reg_pool,
2505 VEC_index (df_mw_hardreg_ptr, *mw_vec, i + dist + 1));
2506 dist++;
2507 }
2508 /* Copy it down to the next position. */
2509 if (dist && i + dist + 1 < count)
2510 VEC_replace (df_mw_hardreg_ptr, *mw_vec, i + 1,
2511 VEC_index (df_mw_hardreg_ptr, *mw_vec, i + dist + 1));
2512 }
2513
2514 count -= dist;
2515 VEC_truncate (df_mw_hardreg_ptr, *mw_vec, count);
2516 }
2517
2518
2519 /* Sort and remove duplicates from the COLLECTION_REC. */
2520
2521 static void
2522 df_canonize_collection_rec (struct df_collection_rec *collection_rec)
2523 {
2524 df_sort_and_compress_refs (&collection_rec->def_vec);
2525 df_sort_and_compress_refs (&collection_rec->use_vec);
2526 df_sort_and_compress_refs (&collection_rec->eq_use_vec);
2527 df_sort_and_compress_mws (&collection_rec->mw_vec);
2528 }
2529
2530
2531 /* Add the new df_ref to appropriate reg_info/ref_info chains. */
2532
2533 static void
2534 df_install_ref (df_ref this_ref,
2535 struct df_reg_info *reg_info,
2536 struct df_ref_info *ref_info,
2537 bool add_to_table)
2538 {
2539 unsigned int regno = DF_REF_REGNO (this_ref);
2540 /* Add the ref to the reg_{def,use,eq_use} chain. */
2541 df_ref head = reg_info->reg_chain;
2542
2543 reg_info->reg_chain = this_ref;
2544 reg_info->n_refs++;
2545
2546 if (DF_REF_FLAGS_IS_SET (this_ref, DF_HARD_REG_LIVE))
2547 {
2548 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
2549 df->hard_regs_live_count[regno]++;
2550 }
2551
2552 gcc_checking_assert (DF_REF_NEXT_REG (this_ref) == NULL
2553 && DF_REF_PREV_REG (this_ref) == NULL);
2554
2555 DF_REF_NEXT_REG (this_ref) = head;
2556
2557 /* We cannot actually link to the head of the chain. */
2558 DF_REF_PREV_REG (this_ref) = NULL;
2559
2560 if (head)
2561 DF_REF_PREV_REG (head) = this_ref;
2562
2563 if (add_to_table)
2564 {
2565 gcc_assert (ref_info->ref_order != DF_REF_ORDER_NO_TABLE);
2566 df_check_and_grow_ref_info (ref_info, 1);
2567 DF_REF_ID (this_ref) = ref_info->table_size;
2568 /* Add the ref to the big array of defs. */
2569 ref_info->refs[ref_info->table_size] = this_ref;
2570 ref_info->table_size++;
2571 }
2572 else
2573 DF_REF_ID (this_ref) = -1;
2574
2575 ref_info->total_size++;
2576 }
2577
2578
2579 /* This function takes one of the groups of refs (defs, uses or
2580 eq_uses) and installs the entire group into the insn. It also adds
2581 each of these refs into the appropriate chains. */
2582
2583 static df_ref *
2584 df_install_refs (basic_block bb,
2585 VEC(df_ref,stack)* old_vec,
2586 struct df_reg_info **reg_info,
2587 struct df_ref_info *ref_info,
2588 bool is_notes)
2589 {
2590 unsigned int count;
2591
2592 count = VEC_length (df_ref, old_vec);
2593 if (count)
2594 {
2595 df_ref *new_vec = XNEWVEC (df_ref, count + 1);
2596 bool add_to_table;
2597 df_ref this_ref;
2598 unsigned int ix;
2599
2600 switch (ref_info->ref_order)
2601 {
2602 case DF_REF_ORDER_UNORDERED_WITH_NOTES:
2603 case DF_REF_ORDER_BY_REG_WITH_NOTES:
2604 case DF_REF_ORDER_BY_INSN_WITH_NOTES:
2605 ref_info->ref_order = DF_REF_ORDER_UNORDERED_WITH_NOTES;
2606 add_to_table = true;
2607 break;
2608 case DF_REF_ORDER_UNORDERED:
2609 case DF_REF_ORDER_BY_REG:
2610 case DF_REF_ORDER_BY_INSN:
2611 ref_info->ref_order = DF_REF_ORDER_UNORDERED;
2612 add_to_table = !is_notes;
2613 break;
2614 default:
2615 add_to_table = false;
2616 break;
2617 }
2618
2619 /* Do not add if ref is not in the right blocks. */
2620 if (add_to_table && df->analyze_subset)
2621 add_to_table = bitmap_bit_p (df->blocks_to_analyze, bb->index);
2622
2623 FOR_EACH_VEC_ELT (df_ref, old_vec, ix, this_ref)
2624 {
2625 new_vec[ix] = this_ref;
2626 df_install_ref (this_ref, reg_info[DF_REF_REGNO (this_ref)],
2627 ref_info, add_to_table);
2628 }
2629
2630 new_vec[count] = NULL;
2631 return new_vec;
2632 }
2633 else
2634 return df_null_ref_rec;
2635 }
2636
2637
2638 /* This function takes the mws and installs the entire group into
2639 the insn. */
2640
2641 static struct df_mw_hardreg **
2642 df_install_mws (VEC(df_mw_hardreg_ptr,stack) *old_vec)
2643 {
2644 unsigned int count;
2645
2646 count = VEC_length (df_mw_hardreg_ptr, old_vec);
2647 if (count)
2648 {
2649 struct df_mw_hardreg **new_vec
2650 = XNEWVEC (struct df_mw_hardreg*, count + 1);
2651 memcpy (new_vec, VEC_address (df_mw_hardreg_ptr, old_vec),
2652 sizeof (struct df_mw_hardreg*) * count);
2653 new_vec[count] = NULL;
2654 return new_vec;
2655 }
2656 else
2657 return df_null_mw_rec;
2658 }
2659
2660
2661 /* Add a chain of df_refs to appropriate ref chain/reg_info/ref_info
2662 chains and update other necessary information. */
2663
2664 static void
2665 df_refs_add_to_chains (struct df_collection_rec *collection_rec,
2666 basic_block bb, rtx insn)
2667 {
2668 if (insn)
2669 {
2670 struct df_insn_info *insn_rec = DF_INSN_INFO_GET (insn);
2671 /* If there is a vector in the collection rec, add it to the
2672 insn. A null rec is a signal that the caller will handle the
2673 chain specially. */
2674 if (collection_rec->def_vec)
2675 {
2676 df_scan_free_ref_vec (insn_rec->defs);
2677 insn_rec->defs
2678 = df_install_refs (bb, collection_rec->def_vec,
2679 df->def_regs,
2680 &df->def_info, false);
2681 }
2682 if (collection_rec->use_vec)
2683 {
2684 df_scan_free_ref_vec (insn_rec->uses);
2685 insn_rec->uses
2686 = df_install_refs (bb, collection_rec->use_vec,
2687 df->use_regs,
2688 &df->use_info, false);
2689 }
2690 if (collection_rec->eq_use_vec)
2691 {
2692 df_scan_free_ref_vec (insn_rec->eq_uses);
2693 insn_rec->eq_uses
2694 = df_install_refs (bb, collection_rec->eq_use_vec,
2695 df->eq_use_regs,
2696 &df->use_info, true);
2697 }
2698 if (collection_rec->mw_vec)
2699 {
2700 df_scan_free_mws_vec (insn_rec->mw_hardregs);
2701 insn_rec->mw_hardregs
2702 = df_install_mws (collection_rec->mw_vec);
2703 }
2704 }
2705 else
2706 {
2707 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
2708
2709 df_scan_free_ref_vec (bb_info->artificial_defs);
2710 bb_info->artificial_defs
2711 = df_install_refs (bb, collection_rec->def_vec,
2712 df->def_regs,
2713 &df->def_info, false);
2714 df_scan_free_ref_vec (bb_info->artificial_uses);
2715 bb_info->artificial_uses
2716 = df_install_refs (bb, collection_rec->use_vec,
2717 df->use_regs,
2718 &df->use_info, false);
2719 }
2720 }
2721
2722
2723 /* Allocate a ref and initialize its fields. */
2724
2725 static df_ref
2726 df_ref_create_structure (enum df_ref_class cl,
2727 struct df_collection_rec *collection_rec,
2728 rtx reg, rtx *loc,
2729 basic_block bb, struct df_insn_info *info,
2730 enum df_ref_type ref_type,
2731 int ref_flags)
2732 {
2733 df_ref this_ref = NULL;
2734 int regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
2735 struct df_scan_problem_data *problem_data
2736 = (struct df_scan_problem_data *) df_scan->problem_data;
2737
2738 switch (cl)
2739 {
2740 case DF_REF_BASE:
2741 this_ref = (df_ref) pool_alloc (problem_data->ref_base_pool);
2742 gcc_checking_assert (loc == NULL);
2743 break;
2744
2745 case DF_REF_ARTIFICIAL:
2746 this_ref = (df_ref) pool_alloc (problem_data->ref_artificial_pool);
2747 this_ref->artificial_ref.bb = bb;
2748 gcc_checking_assert (loc == NULL);
2749 break;
2750
2751 case DF_REF_REGULAR:
2752 this_ref = (df_ref) pool_alloc (problem_data->ref_regular_pool);
2753 this_ref->regular_ref.loc = loc;
2754 gcc_checking_assert (loc);
2755 break;
2756 }
2757
2758 DF_REF_CLASS (this_ref) = cl;
2759 DF_REF_ID (this_ref) = -1;
2760 DF_REF_REG (this_ref) = reg;
2761 DF_REF_REGNO (this_ref) = regno;
2762 DF_REF_TYPE (this_ref) = ref_type;
2763 DF_REF_INSN_INFO (this_ref) = info;
2764 DF_REF_CHAIN (this_ref) = NULL;
2765 DF_REF_FLAGS (this_ref) = ref_flags;
2766 DF_REF_NEXT_REG (this_ref) = NULL;
2767 DF_REF_PREV_REG (this_ref) = NULL;
2768 DF_REF_ORDER (this_ref) = df->ref_order++;
2769
2770 /* We need to clear this bit because fwprop, and in the future
2771 possibly other optimizations, sometimes create new refs using old
2772 refs as the model. */
2773 DF_REF_FLAGS_CLEAR (this_ref, DF_HARD_REG_LIVE);
2774
2775 /* See if this ref needs to have DF_HARD_REG_LIVE bit set. */
2776 if ((regno < FIRST_PSEUDO_REGISTER)
2777 && (!DF_REF_IS_ARTIFICIAL (this_ref)))
2778 {
2779 if (DF_REF_REG_DEF_P (this_ref))
2780 {
2781 if (!DF_REF_FLAGS_IS_SET (this_ref, DF_REF_MAY_CLOBBER))
2782 DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
2783 }
2784 else if (!(TEST_HARD_REG_BIT (elim_reg_set, regno)
2785 && (regno == FRAME_POINTER_REGNUM
2786 || regno == ARG_POINTER_REGNUM)))
2787 DF_REF_FLAGS_SET (this_ref, DF_HARD_REG_LIVE);
2788 }
2789
2790 if (collection_rec)
2791 {
2792 if (DF_REF_REG_DEF_P (this_ref))
2793 VEC_safe_push (df_ref, stack, collection_rec->def_vec, this_ref);
2794 else if (DF_REF_FLAGS (this_ref) & DF_REF_IN_NOTE)
2795 VEC_safe_push (df_ref, stack, collection_rec->eq_use_vec, this_ref);
2796 else
2797 VEC_safe_push (df_ref, stack, collection_rec->use_vec, this_ref);
2798 }
2799
2800 return this_ref;
2801 }
2802
2803
2804 /* Create new references of type DF_REF_TYPE for each part of register REG
2805 at address LOC within INSN of BB. */
2806
2807
2808 static void
2809 df_ref_record (enum df_ref_class cl,
2810 struct df_collection_rec *collection_rec,
2811 rtx reg, rtx *loc,
2812 basic_block bb, struct df_insn_info *insn_info,
2813 enum df_ref_type ref_type,
2814 int ref_flags)
2815 {
2816 unsigned int regno;
2817
2818 gcc_checking_assert (REG_P (reg) || GET_CODE (reg) == SUBREG);
2819
2820 regno = REGNO (GET_CODE (reg) == SUBREG ? SUBREG_REG (reg) : reg);
2821 if (regno < FIRST_PSEUDO_REGISTER)
2822 {
2823 struct df_mw_hardreg *hardreg = NULL;
2824 struct df_scan_problem_data *problem_data
2825 = (struct df_scan_problem_data *) df_scan->problem_data;
2826 unsigned int i;
2827 unsigned int endregno;
2828 df_ref ref;
2829
2830 if (GET_CODE (reg) == SUBREG)
2831 {
2832 regno += subreg_regno_offset (regno, GET_MODE (SUBREG_REG (reg)),
2833 SUBREG_BYTE (reg), GET_MODE (reg));
2834 endregno = regno + subreg_nregs (reg);
2835 }
2836 else
2837 endregno = END_HARD_REGNO (reg);
2838
2839 /* If this is a multiword hardreg, we create some extra
2840 data structures that will enable us to easily build REG_DEAD
2841 and REG_UNUSED notes. */
2842 if ((endregno != regno + 1) && insn_info)
2843 {
2844 /* Sets to a subreg of a multiword register are partial.
2845 Sets to a non-subreg of a multiword register are not. */
2846 if (GET_CODE (reg) == SUBREG)
2847 ref_flags |= DF_REF_PARTIAL;
2848 ref_flags |= DF_REF_MW_HARDREG;
2849
2850 hardreg = (struct df_mw_hardreg *) pool_alloc (problem_data->mw_reg_pool);
2851 hardreg->type = ref_type;
2852 hardreg->flags = ref_flags;
2853 hardreg->mw_reg = reg;
2854 hardreg->start_regno = regno;
2855 hardreg->end_regno = endregno - 1;
2856 hardreg->mw_order = df->ref_order++;
2857 VEC_safe_push (df_mw_hardreg_ptr, stack, collection_rec->mw_vec,
2858 hardreg);
2859 }
2860
2861 for (i = regno; i < endregno; i++)
2862 {
2863 ref = df_ref_create_structure (cl, collection_rec, regno_reg_rtx[i], loc,
2864 bb, insn_info, ref_type, ref_flags);
2865
2866 gcc_assert (ORIGINAL_REGNO (DF_REF_REG (ref)) == i);
2867 }
2868 }
2869 else
2870 {
2871 df_ref_create_structure (cl, collection_rec, reg, loc, bb, insn_info,
2872 ref_type, ref_flags);
2873 }
2874 }
2875
2876
2877 /* A set to a non-paradoxical SUBREG for which the number of word_mode units
2878 covered by the outer mode is smaller than that covered by the inner
2879 mode is a read-modify-write operation.
2880 This function returns true iff the SUBREG X is such a SUBREG. */
2881
2882 bool
2883 df_read_modify_subreg_p (rtx x)
2884 {
2885 unsigned int isize, osize;
2886 if (GET_CODE (x) != SUBREG)
2887 return false;
2888 isize = GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)));
2889 osize = GET_MODE_SIZE (GET_MODE (x));
2890 return isize > osize
2891 && isize > REGMODE_NATURAL_SIZE (GET_MODE (SUBREG_REG (x)));
2892 }
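
/* A worked example (illustrative, assuming 4-byte words and
   REGMODE_NATURAL_SIZE of 4): a set of (subreg:SI (reg:DI X) 0) has
   isize == 8 and osize == 4, so it writes only one of X's two words
   and is read-modify-write; a set of (subreg:DI (reg:DI X) 0) has
   isize == osize and is not.  */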
2893
2894
2895 /* Process all the registers defined in the rtx, X.
2896 Autoincrement/decrement definitions will be picked up by
2897 df_uses_record. */
2898
2899 static void
2900 df_def_record_1 (struct df_collection_rec *collection_rec,
2901 rtx x, basic_block bb, struct df_insn_info *insn_info,
2902 int flags)
2903 {
2904 rtx *loc;
2905 rtx dst;
2906
2907 /* We may recursively call ourselves on EXPR_LIST when dealing with a
2908 PARALLEL construct. */
2909 if (GET_CODE (x) == EXPR_LIST || GET_CODE (x) == CLOBBER)
2910 loc = &XEXP (x, 0);
2911 else
2912 loc = &SET_DEST (x);
2913 dst = *loc;
2914
2915 /* It is legal to have a set destination be a parallel. */
2916 if (GET_CODE (dst) == PARALLEL)
2917 {
2918 int i;
2919
2920 for (i = XVECLEN (dst, 0) - 1; i >= 0; i--)
2921 {
2922 rtx temp = XVECEXP (dst, 0, i);
2923 if (GET_CODE (temp) == EXPR_LIST || GET_CODE (temp) == CLOBBER
2924 || GET_CODE (temp) == SET)
2925 df_def_record_1 (collection_rec,
2926 temp, bb, insn_info,
2927 GET_CODE (temp) == CLOBBER
2928 ? flags | DF_REF_MUST_CLOBBER : flags);
2929 }
2930 return;
2931 }
2932
2933 if (GET_CODE (dst) == STRICT_LOW_PART)
2934 {
2935 flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_STRICT_LOW_PART;
2936
2937 loc = &XEXP (dst, 0);
2938 dst = *loc;
2939 }
2940
2941 if (GET_CODE (dst) == ZERO_EXTRACT)
2942 {
2943 flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL | DF_REF_ZERO_EXTRACT;
2944
2945 loc = &XEXP (dst, 0);
2946 dst = *loc;
2947 }
2948
2949 /* At this point if we do not have a reg or a subreg, just return. */
2950 if (REG_P (dst))
2951 {
2952 df_ref_record (DF_REF_REGULAR, collection_rec,
2953 dst, loc, bb, insn_info, DF_REF_REG_DEF, flags);
2954
2955 /* We want to keep sp alive everywhere - by making all
2956 writes to sp also use of sp. */
2957 if (REGNO (dst) == STACK_POINTER_REGNUM)
2958 df_ref_record (DF_REF_BASE, collection_rec,
2959 dst, NULL, bb, insn_info, DF_REF_REG_USE, flags);
2960 }
2961 else if (GET_CODE (dst) == SUBREG && REG_P (SUBREG_REG (dst)))
2962 {
2963 if (df_read_modify_subreg_p (dst))
2964 flags |= DF_REF_READ_WRITE | DF_REF_PARTIAL;
2965
2966 flags |= DF_REF_SUBREG;
2967
2968 df_ref_record (DF_REF_REGULAR, collection_rec,
2969 dst, loc, bb, insn_info, DF_REF_REG_DEF, flags);
2970 }
2971 }
2972
2973
2974 /* Process all the registers defined in the pattern rtx, X. */
2975
2976 static void
2977 df_defs_record (struct df_collection_rec *collection_rec,
2978 rtx x, basic_block bb, struct df_insn_info *insn_info,
2979 int flags)
2980 {
2981 RTX_CODE code = GET_CODE (x);
2982
2983 if (code == SET || code == CLOBBER)
2984 {
2985 /* Mark the single def within the pattern. */
2986 int clobber_flags = flags;
2987 clobber_flags |= (code == CLOBBER) ? DF_REF_MUST_CLOBBER : 0;
2988 df_def_record_1 (collection_rec, x, bb, insn_info, clobber_flags);
2989 }
2990 else if (code == COND_EXEC)
2991 {
2992 df_defs_record (collection_rec, COND_EXEC_CODE (x),
2993 bb, insn_info, DF_REF_CONDITIONAL);
2994 }
2995 else if (code == PARALLEL)
2996 {
2997 int i;
2998
2999 /* Mark the multiple defs within the pattern. */
3000 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
3001 df_defs_record (collection_rec, XVECEXP (x, 0, i), bb, insn_info, flags);
3002 }
3003 }
3004
3005
3006 /* Process all the registers used in the rtx at address LOC. */
3007
3008 static void
3009 df_uses_record (struct df_collection_rec *collection_rec,
3010 rtx *loc, enum df_ref_type ref_type,
3011 basic_block bb, struct df_insn_info *insn_info,
3012 int flags)
3013 {
3014 RTX_CODE code;
3015 rtx x;
3016
3017 retry:
3018 x = *loc;
3019 if (!x)
3020 return;
3021 code = GET_CODE (x);
3022 switch (code)
3023 {
3024 case LABEL_REF:
3025 case SYMBOL_REF:
3026 case CONST_INT:
3027 case CONST:
3028 case CONST_DOUBLE:
3029 case CONST_FIXED:
3030 case CONST_VECTOR:
3031 case PC:
3032 case CC0:
3033 case ADDR_VEC:
3034 case ADDR_DIFF_VEC:
3035 return;
3036
3037 case CLOBBER:
3038 /* If we are clobbering a MEM, mark any registers inside the address
3039 as being used. */
3040 if (MEM_P (XEXP (x, 0)))
3041 df_uses_record (collection_rec,
3042 &XEXP (XEXP (x, 0), 0),
3043 DF_REF_REG_MEM_STORE,
3044 bb, insn_info,
3045 flags);
3046
3047 /* If we're clobbering a REG then we have a def, so ignore it. */
3048 return;
3049
3050 case MEM:
3051 df_uses_record (collection_rec,
3052 &XEXP (x, 0), DF_REF_REG_MEM_LOAD,
3053 bb, insn_info, flags & DF_REF_IN_NOTE);
3054 return;
3055
3056 case SUBREG:
3057 /* While we're here, optimize this case. */
3058 flags |= DF_REF_PARTIAL;
3059 /* In case the SUBREG is not of a REG, do not optimize. */
3060 if (!REG_P (SUBREG_REG (x)))
3061 {
3062 loc = &SUBREG_REG (x);
3063 df_uses_record (collection_rec, loc, ref_type, bb, insn_info, flags);
3064 return;
3065 }
3066 /* ... Fall through ... */
3067
3068 case REG:
3069 df_ref_record (DF_REF_REGULAR, collection_rec,
3070 x, loc, bb, insn_info,
3071 ref_type, flags);
3072 return;
3073
3074 case SIGN_EXTRACT:
3075 case ZERO_EXTRACT:
3076 {
3077 df_uses_record (collection_rec,
3078 &XEXP (x, 1), ref_type, bb, insn_info, flags);
3079 df_uses_record (collection_rec,
3080 &XEXP (x, 2), ref_type, bb, insn_info, flags);
3081
3082 /* If the parameters to the zero or sign extract are
3083 constants, strip them off and recurse, otherwise there is
3084 no information that we can gain from this operation. */
3085 if (code == ZERO_EXTRACT)
3086 flags |= DF_REF_ZERO_EXTRACT;
3087 else
3088 flags |= DF_REF_SIGN_EXTRACT;
3089
3090 df_uses_record (collection_rec,
3091 &XEXP (x, 0), ref_type, bb, insn_info, flags);
3092 return;
3093 }
3094 break;
3095
3096 case SET:
3097 {
3098 rtx dst = SET_DEST (x);
3099 gcc_assert (!(flags & DF_REF_IN_NOTE));
3100 df_uses_record (collection_rec,
3101 &SET_SRC (x), DF_REF_REG_USE, bb, insn_info, flags);
3102
3103 switch (GET_CODE (dst))
3104 {
3105 case SUBREG:
3106 if (df_read_modify_subreg_p (dst))
3107 {
3108 df_uses_record (collection_rec, &SUBREG_REG (dst),
3109 DF_REF_REG_USE, bb, insn_info,
3110 flags | DF_REF_READ_WRITE | DF_REF_SUBREG);
3111 break;
3112 }
3113 /* Fall through. */
3114 case REG:
3115 case PARALLEL:
3116 case SCRATCH:
3117 case PC:
3118 case CC0:
3119 break;
3120 case MEM:
3121 df_uses_record (collection_rec, &XEXP (dst, 0),
3122 DF_REF_REG_MEM_STORE, bb, insn_info, flags);
3123 break;
3124 case STRICT_LOW_PART:
3125 {
3126 rtx *temp = &XEXP (dst, 0);
3127 /* A strict_low_part uses the whole REG and not just the
3128 SUBREG. */
3129 dst = XEXP (dst, 0);
3130 df_uses_record (collection_rec,
3131 (GET_CODE (dst) == SUBREG) ? &SUBREG_REG (dst) : temp,
3132 DF_REF_REG_USE, bb, insn_info,
3133 DF_REF_READ_WRITE | DF_REF_STRICT_LOW_PART);
3134 }
3135 break;
3136 case ZERO_EXTRACT:
3137 {
3138 df_uses_record (collection_rec, &XEXP (dst, 1),
3139 DF_REF_REG_USE, bb, insn_info, flags);
3140 df_uses_record (collection_rec, &XEXP (dst, 2),
3141 DF_REF_REG_USE, bb, insn_info, flags);
3142 if (GET_CODE (XEXP (dst, 0)) == MEM)
3143 df_uses_record (collection_rec, &XEXP (dst, 0),
3144 DF_REF_REG_USE, bb, insn_info,
3145 flags);
3146 else
3147 df_uses_record (collection_rec, &XEXP (dst, 0),
3148 DF_REF_REG_USE, bb, insn_info,
3149 DF_REF_READ_WRITE | DF_REF_ZERO_EXTRACT);
3150 }
3151 break;
3152
3153 default:
3154 gcc_unreachable ();
3155 }
3156 return;
3157 }
3158
3159 case RETURN:
3160 break;
3161
3162 case ASM_OPERANDS:
3163 case UNSPEC_VOLATILE:
3164 case TRAP_IF:
3165 case ASM_INPUT:
3166 {
3167 /* Traditional and volatile asm instructions must be
3168 considered to use and clobber all hard registers, all
3169 pseudo-registers and all of memory. So must TRAP_IF and
3170 UNSPEC_VOLATILE operations.
3171
3172 Consider for instance a volatile asm that changes the fpu
3173 rounding mode. An insn should not be moved across this
3174 even if it only uses pseudo-regs because it might give an
3175 incorrectly rounded result.
3176
3177 However, flow.c's liveness computation did *not* do this,
3178 giving the reasoning as " ?!? Unfortunately, marking all
3179 hard registers as live causes massive problems for the
3180 register allocator and marking all pseudos as live creates
3181 mountains of uninitialized variable warnings."
3182
3183 In order to maintain the status quo with regard to liveness
3184 and uses, we do what flow.c did and just mark any regs we
3185 can find in ASM_OPERANDS as used.  Asm insns are also scanned
3186 elsewhere and regs_asm_clobbered is filled out there.
3187
3188 For all ASM_OPERANDS, we must traverse the vector of input
3189 operands.  We cannot just fall through here since then we
3190 would be confused by the ASM_INPUT rtx inside ASM_OPERANDS,
3191 which does not indicate a traditional asm unlike its normal
3192 usage. */
3193 if (code == ASM_OPERANDS)
3194 {
3195 int j;
3196
3197 for (j = 0; j < ASM_OPERANDS_INPUT_LENGTH (x); j++)
3198 df_uses_record (collection_rec, &ASM_OPERANDS_INPUT (x, j),
3199 DF_REF_REG_USE, bb, insn_info, flags);
3200 return;
3201 }
3202 break;
3203 }
3204
3205 case VAR_LOCATION:
3206 df_uses_record (collection_rec,
3207 &PAT_VAR_LOCATION_LOC (x),
3208 DF_REF_REG_USE, bb, insn_info, flags);
3209 return;
3210
3211 case PRE_DEC:
3212 case POST_DEC:
3213 case PRE_INC:
3214 case POST_INC:
3215 case PRE_MODIFY:
3216 case POST_MODIFY:
3217 gcc_assert (!DEBUG_INSN_P (insn_info->insn));
3218 /* Catch the def of the register being modified. */
3219 df_ref_record (DF_REF_REGULAR, collection_rec, XEXP (x, 0), &XEXP (x, 0),
3220 bb, insn_info,
3221 DF_REF_REG_DEF,
3222 flags | DF_REF_READ_WRITE | DF_REF_PRE_POST_MODIFY);
3223
3224 /* ... Fall through to handle uses ... */
3225
3226 default:
3227 break;
3228 }
3229
3230 /* Recursively scan the operands of this expression. */
3231 {
3232 const char *fmt = GET_RTX_FORMAT (code);
3233 int i;
3234
3235 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3236 {
3237 if (fmt[i] == 'e')
3238 {
3239 /* Tail recursive case: save a function call level. */
3240 if (i == 0)
3241 {
3242 loc = &XEXP (x, 0);
3243 goto retry;
3244 }
3245 df_uses_record (collection_rec, &XEXP (x, i), ref_type,
3246 bb, insn_info, flags);
3247 }
3248 else if (fmt[i] == 'E')
3249 {
3250 int j;
3251 for (j = 0; j < XVECLEN (x, i); j++)
3252 df_uses_record (collection_rec,
3253 &XVECEXP (x, i, j), ref_type,
3254 bb, insn_info, flags);
3255 }
3256 }
3257 }
3258
3259 return;
3260 }
3261
3262
3263 /* For all DF_REF_CONDITIONAL defs, add a corresponding use. */
3264
3265 static void
3266 df_get_conditional_uses (struct df_collection_rec *collection_rec)
3267 {
3268 unsigned int ix;
3269 df_ref ref;
3270
3271 FOR_EACH_VEC_ELT (df_ref, collection_rec->def_vec, ix, ref)
3272 {
3273 if (DF_REF_FLAGS_IS_SET (ref, DF_REF_CONDITIONAL))
3274 {
3275 df_ref use;
3276
3277 use = df_ref_create_structure (DF_REF_CLASS (ref), collection_rec, DF_REF_REG (ref),
3278 DF_REF_LOC (ref), DF_REF_BB (ref),
3279 DF_REF_INSN_INFO (ref), DF_REF_REG_USE,
3280 DF_REF_FLAGS (ref) & ~DF_REF_CONDITIONAL);
3281 DF_REF_REGNO (use) = DF_REF_REGNO (ref);
3282 }
3283 }
3284 }
3285
3286
3287 /* Get call's extra defs and uses. */
3288
3289 static void
3290 df_get_call_refs (struct df_collection_rec * collection_rec,
3291 basic_block bb,
3292 struct df_insn_info *insn_info,
3293 int flags)
3294 {
3295 rtx note;
3296 bitmap_iterator bi;
3297 unsigned int ui;
3298 bool is_sibling_call;
3299 unsigned int i;
3300 df_ref def;
3301 bitmap_head defs_generated;
3302
3303 bitmap_initialize (&defs_generated, &df_bitmap_obstack);
3304
3305 /* Do not generate clobbers for registers that are the result of the
3306 call. Doing so would cause ordering problems in the chain building
3307 code depending on which def is seen first. */
3308 FOR_EACH_VEC_ELT (df_ref, collection_rec->def_vec, i, def)
3309 bitmap_set_bit (&defs_generated, DF_REF_REGNO (def));
3310
3311 /* Record the registers used to pass arguments, and explicitly
3312 noted as clobbered. */
3313 for (note = CALL_INSN_FUNCTION_USAGE (insn_info->insn); note;
3314 note = XEXP (note, 1))
3315 {
3316 if (GET_CODE (XEXP (note, 0)) == USE)
3317 df_uses_record (collection_rec, &XEXP (XEXP (note, 0), 0),
3318 DF_REF_REG_USE, bb, insn_info, flags);
3319 else if (GET_CODE (XEXP (note, 0)) == CLOBBER)
3320 {
3321 if (REG_P (XEXP (XEXP (note, 0), 0)))
3322 {
3323 unsigned int regno = REGNO (XEXP (XEXP (note, 0), 0));
3324 if (!bitmap_bit_p (&defs_generated, regno))
3325 df_defs_record (collection_rec, XEXP (note, 0), bb,
3326 insn_info, flags);
3327 }
3328 else
3329 df_uses_record (collection_rec, &XEXP (note, 0),
3330 DF_REF_REG_USE, bb, insn_info, flags);
3331 }
3332 }
3333
3334 /* The stack ptr is used (honorarily) by a CALL insn. */
3335 df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[STACK_POINTER_REGNUM],
3336 NULL, bb, insn_info, DF_REF_REG_USE,
3337 DF_REF_CALL_STACK_USAGE | flags);
3338
3339 /* Calls may also reference any of the global registers,
3340 so they are recorded as used. */
3341 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3342 if (global_regs[i])
3343 {
3344 df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
3345 NULL, bb, insn_info, DF_REF_REG_USE, flags);
3346 df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[i],
3347 NULL, bb, insn_info, DF_REF_REG_DEF, flags);
3348 }
3349
3350 is_sibling_call = SIBLING_CALL_P (insn_info->insn);
3351 EXECUTE_IF_SET_IN_BITMAP (regs_invalidated_by_call_regset, 0, ui, bi)
3352 {
3353 if (!global_regs[ui]
3354 && (!bitmap_bit_p (&defs_generated, ui))
3355 && (!is_sibling_call
3356 || !bitmap_bit_p (df->exit_block_uses, ui)
3357 || refers_to_regno_p (ui, ui+1,
3358 crtl->return_rtx, NULL)))
3359 df_ref_record (DF_REF_BASE, collection_rec, regno_reg_rtx[ui],
3360 NULL, bb, insn_info, DF_REF_REG_DEF,
3361 DF_REF_MAY_CLOBBER | flags);
3362 }
3363
3364 bitmap_clear (&defs_generated);
3365 return;
3366 }
3367
3368 /* Collect all refs in the INSN. This function is free of any
3369 side-effect - it will create and return lists of df_ref's in the
3370 COLLECTION_REC without putting those refs into existing ref chains
3371 and reg chains. */
3372
3373 static void
3374 df_insn_refs_collect (struct df_collection_rec* collection_rec,
3375 basic_block bb, struct df_insn_info *insn_info)
3376 {
3377 rtx note;
3378 bool is_cond_exec = (GET_CODE (PATTERN (insn_info->insn)) == COND_EXEC);
3379
3380 /* Clear out the collection record. */
3381 VEC_truncate (df_ref, collection_rec->def_vec, 0);
3382 VEC_truncate (df_ref, collection_rec->use_vec, 0);
3383 VEC_truncate (df_ref, collection_rec->eq_use_vec, 0);
3384 VEC_truncate (df_mw_hardreg_ptr, collection_rec->mw_vec, 0);
3385
3386 /* Record register defs. */
3387 df_defs_record (collection_rec, PATTERN (insn_info->insn), bb, insn_info, 0);
3388
3389 /* Process REG_EQUIV/REG_EQUAL notes. */
3390 for (note = REG_NOTES (insn_info->insn); note;
3391 note = XEXP (note, 1))
3392 {
3393 switch (REG_NOTE_KIND (note))
3394 {
3395 case REG_EQUIV:
3396 case REG_EQUAL:
3397 df_uses_record (collection_rec,
3398 &XEXP (note, 0), DF_REF_REG_USE,
3399 bb, insn_info, DF_REF_IN_NOTE);
3400 break;
3401 case REG_NON_LOCAL_GOTO:
3402 /* The frame ptr is used by a non-local goto. */
3403 df_ref_record (DF_REF_BASE, collection_rec,
3404 regno_reg_rtx[FRAME_POINTER_REGNUM],
3405 NULL, bb, insn_info,
3406 DF_REF_REG_USE, 0);
3407 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
3408 df_ref_record (DF_REF_BASE, collection_rec,
3409 regno_reg_rtx[HARD_FRAME_POINTER_REGNUM],
3410 NULL, bb, insn_info,
3411 DF_REF_REG_USE, 0);
3412 #endif
3413 break;
3414 default:
3415 break;
3416 }
3417 }
3418
3419 if (CALL_P (insn_info->insn))
3420 df_get_call_refs (collection_rec, bb, insn_info,
3421 (is_cond_exec) ? DF_REF_CONDITIONAL : 0);
3422
3423 /* Record the register uses. */
3424 df_uses_record (collection_rec,
3425 &PATTERN (insn_info->insn), DF_REF_REG_USE, bb, insn_info, 0);
3426
3427 /* DF_REF_CONDITIONAL needs corresponding USES. */
3428 if (is_cond_exec)
3429 df_get_conditional_uses (collection_rec);
3430
3431 df_canonize_collection_rec (collection_rec);
3432 }
3433
3434 /* Recompute the luids for the insns in BB. */
3435
3436 void
3437 df_recompute_luids (basic_block bb)
3438 {
3439 rtx insn;
3440 int luid = 0;
3441
3442 df_grow_insn_info ();
3443
3444 /* Scan the block an insn at a time from beginning to end. */
3445 FOR_BB_INSNS (bb, insn)
3446 {
3447 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
3448 /* Inserting labels does not always trigger the incremental
3449 rescanning. */
3450 if (!insn_info)
3451 {
3452 gcc_assert (!INSN_P (insn));
3453 insn_info = df_insn_create_insn_record (insn);
3454 }
3455
3456 DF_INSN_INFO_LUID (insn_info) = luid;
3457 if (INSN_P (insn))
3458 luid++;
3459 }
3460 }
3461
3462
3463 /* Collect all artificial refs at the block level for BB and add them
3464 to COLLECTION_REC. */
3465
3466 static void
3467 df_bb_refs_collect (struct df_collection_rec *collection_rec, basic_block bb)
3468 {
3469 VEC_truncate (df_ref, collection_rec->def_vec, 0);
3470 VEC_truncate (df_ref, collection_rec->use_vec, 0);
3471 VEC_truncate (df_ref, collection_rec->eq_use_vec, 0);
3472 VEC_truncate (df_mw_hardreg_ptr, collection_rec->mw_vec, 0);
3473
3474 if (bb->index == ENTRY_BLOCK)
3475 {
3476 df_entry_block_defs_collect (collection_rec, df->entry_block_defs);
3477 return;
3478 }
3479 else if (bb->index == EXIT_BLOCK)
3480 {
3481 df_exit_block_uses_collect (collection_rec, df->exit_block_uses);
3482 return;
3483 }
3484
3485 #ifdef EH_RETURN_DATA_REGNO
3486 if (bb_has_eh_pred (bb))
3487 {
3488 unsigned int i;
3489 /* Mark the registers that will contain data for the handler. */
3490 for (i = 0; ; ++i)
3491 {
3492 unsigned regno = EH_RETURN_DATA_REGNO (i);
3493 if (regno == INVALID_REGNUM)
3494 break;
3495 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
3496 bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);
3497 }
3498 }
3499 #endif
3500
3501 /* Add the hard_frame_pointer if this block is the target of a
3502 non-local goto. */
3503 if (bb->flags & BB_NON_LOCAL_GOTO_TARGET)
3504 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, hard_frame_pointer_rtx, NULL,
3505 bb, NULL, DF_REF_REG_DEF, DF_REF_AT_TOP);
3506
3507 /* Add the artificial uses. */
3508 if (bb->index >= NUM_FIXED_BLOCKS)
3509 {
3510 bitmap_iterator bi;
3511 unsigned int regno;
3512 bitmap au = bb_has_eh_pred (bb)
3513 ? &df->eh_block_artificial_uses
3514 : &df->regular_block_artificial_uses;
3515
3516 EXECUTE_IF_SET_IN_BITMAP (au, 0, regno, bi)
3517 {
3518 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[regno], NULL,
3519 bb, NULL, DF_REF_REG_USE, 0);
3520 }
3521 }
3522
3523 df_canonize_collection_rec (collection_rec);
3524 }
3525
3526
3527 /* Record all the refs within the basic block BB_INDEX and scan the instructions if SCAN_INSNS. */
3528
3529 void
3530 df_bb_refs_record (int bb_index, bool scan_insns)
3531 {
3532 basic_block bb = BASIC_BLOCK (bb_index);
3533 rtx insn;
3534 int luid = 0;
3535 struct df_collection_rec collection_rec;
3536
3537 if (!df)
3538 return;
3539
3540 df_grow_bb_info (df_scan);
3541 collection_rec.def_vec = VEC_alloc (df_ref, stack, 128);
3542 collection_rec.use_vec = VEC_alloc (df_ref, stack, 32);
3543 collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
3544 collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);
3545
3546 if (scan_insns)
3547 /* Scan the block an insn at a time from beginning to end. */
3548 FOR_BB_INSNS (bb, insn)
3549 {
3550 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
3551 gcc_assert (!insn_info);
3552
3553 insn_info = df_insn_create_insn_record (insn);
3554 if (INSN_P (insn))
3555 {
3556 /* Record refs within INSN. */
3557 DF_INSN_INFO_LUID (insn_info) = luid++;
3558 df_insn_refs_collect (&collection_rec, bb, DF_INSN_INFO_GET (insn));
3559 df_refs_add_to_chains (&collection_rec, bb, insn);
3560 }
3561 DF_INSN_INFO_LUID (insn_info) = luid;
3562 }
3563
3564 /* Other block level artificial refs. */
3565 df_bb_refs_collect (&collection_rec, bb);
3566 df_refs_add_to_chains (&collection_rec, bb, NULL);
3567
3568 VEC_free (df_ref, stack, collection_rec.def_vec);
3569 VEC_free (df_ref, stack, collection_rec.use_vec);
3570 VEC_free (df_ref, stack, collection_rec.eq_use_vec);
3571 VEC_free (df_mw_hardreg_ptr, stack, collection_rec.mw_vec);
3572
3573 /* Now that the block has been processed, set the block as dirty so
3574 LR and LIVE will get it processed. */
3575 df_set_bb_dirty (bb);
3576 }
3577
3578
3579 /* Get the artificial use set for a regular (i.e. non-exit/non-entry)
3580 block. */
3581
3582 static void
3583 df_get_regular_block_artificial_uses (bitmap regular_block_artificial_uses)
3584 {
3585 #ifdef EH_USES
3586 unsigned int i;
3587 #endif
3588
3589 bitmap_clear (regular_block_artificial_uses);
3590
3591 if (reload_completed)
3592 {
3593 if (frame_pointer_needed)
3594 bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3595 }
3596 else
3597 /* Before reload, there are a few registers that must be forced
3598 live everywhere -- which might not already be the case for
3599 blocks within infinite loops. */
3600 {
3601 /* Any reference to any pseudo before reload is a potential
3602 reference of the frame pointer. */
3603 bitmap_set_bit (regular_block_artificial_uses, FRAME_POINTER_REGNUM);
3604
3605 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
3606 bitmap_set_bit (regular_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3607 #endif
3608
3609 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3610 /* Pseudos with argument area equivalences may require
3611 reloading via the argument pointer. */
3612 if (fixed_regs[ARG_POINTER_REGNUM])
3613 bitmap_set_bit (regular_block_artificial_uses, ARG_POINTER_REGNUM);
3614 #endif
3615
3616 /* Any constant, or pseudo with constant equivalences, may
3617 require reloading from memory using the pic register. */
3618 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3619 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3620 bitmap_set_bit (regular_block_artificial_uses, PIC_OFFSET_TABLE_REGNUM);
3621 }
3622 /* The all-important stack pointer must always be live. */
3623 bitmap_set_bit (regular_block_artificial_uses, STACK_POINTER_REGNUM);
3624
3625 #ifdef EH_USES
3626 /* EH_USES registers are used:
3627 1) at all insns that might throw (calls or with -fnon-call-exceptions
3628 trapping insns)
3629 2) in all EH edges
3630 3) to support backtraces and/or debugging, anywhere between their
3631 initialization and where the saved registers are restored
3632 from them, including the cases where we don't reach the epilogue
3633 (noreturn call or infinite loop). */
3634 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3635 if (EH_USES (i))
3636 bitmap_set_bit (regular_block_artificial_uses, i);
3637 #endif
3638 }
3639
3640
3641 /* Get the artificial use set for an eh block. */
3642
3643 static void
3644 df_get_eh_block_artificial_uses (bitmap eh_block_artificial_uses)
3645 {
3646 bitmap_clear (eh_block_artificial_uses);
3647
3648 /* The following code (down through the arg_pointer setting) APPEARS
3649 to be necessary because there is nothing that actually
3650 describes what the exception handling code may need
3651 to keep alive. */
3652 if (reload_completed)
3653 {
3654 if (frame_pointer_needed)
3655 {
3656 bitmap_set_bit (eh_block_artificial_uses, FRAME_POINTER_REGNUM);
3657 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
3658 bitmap_set_bit (eh_block_artificial_uses, HARD_FRAME_POINTER_REGNUM);
3659 #endif
3660 }
3661 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3662 if (fixed_regs[ARG_POINTER_REGNUM])
3663 bitmap_set_bit (eh_block_artificial_uses, ARG_POINTER_REGNUM);
3664 #endif
3665 }
3666 }
3667
3668
3669 \f
3670 /*----------------------------------------------------------------------------
3671 Specialized hard register scanning functions.
3672 ----------------------------------------------------------------------------*/
3673
3674
3675 /* Mark a register in SET. Hard registers in large modes get all
3676 of their component registers set as well. */
3677
3678 static void
3679 df_mark_reg (rtx reg, void *vset)
3680 {
3681 bitmap set = (bitmap) vset;
3682 int regno = REGNO (reg);
3683
3684 gcc_assert (GET_MODE (reg) != BLKmode);
3685
3686 if (regno < FIRST_PSEUDO_REGISTER)
3687 {
3688 int n = hard_regno_nregs[regno][GET_MODE (reg)];
3689 bitmap_set_range (set, regno, n);
3690 }
3691 else
3692 bitmap_set_bit (set, regno);
3693 }
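
/* For example (illustrative): on a target where DImode occupies two
   hard registers, df_mark_reg ((reg:DI 0), set) sets bits 0 and 1 in
   SET, while a pseudo-register gets exactly its own bit.  */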
3694
3695
3696 /* Set the bit for regs that are considered being defined at the entry. */
3697
3698 static void
3699 df_get_entry_block_def_set (bitmap entry_block_defs)
3700 {
3701 rtx r;
3702 int i;
3703
3704 bitmap_clear (entry_block_defs);
3705
3706 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3707 {
3708 if (FUNCTION_ARG_REGNO_P (i))
3709 #ifdef INCOMING_REGNO
3710 bitmap_set_bit (entry_block_defs, INCOMING_REGNO (i));
3711 #else
3712 bitmap_set_bit (entry_block_defs, i);
3713 #endif
3714 }
3715
3716 /* The always important stack pointer. */
3717 bitmap_set_bit (entry_block_defs, STACK_POINTER_REGNUM);
3718
3719 /* Once the prologue has been generated, all of these registers
3720 should just show up in the first regular block. */
3721 if (HAVE_prologue && epilogue_completed)
3722 {
3723 /* Defs for the callee saved registers are inserted so that the
3724 pushes have some defining location. */
3725 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3726 if ((call_used_regs[i] == 0) && (df_regs_ever_live_p (i)))
3727 bitmap_set_bit (entry_block_defs, i);
3728 }
3729
3730 r = targetm.calls.struct_value_rtx (current_function_decl, true);
3731 if (r && REG_P (r))
3732 bitmap_set_bit (entry_block_defs, REGNO (r));
3733
3734 /* If the function has an incoming STATIC_CHAIN, it has to show up
3735 in the entry def set. */
3736 r = targetm.calls.static_chain (current_function_decl, true);
3737 if (r && REG_P (r))
3738 bitmap_set_bit (entry_block_defs, REGNO (r));
3739
3740 if ((!reload_completed) || frame_pointer_needed)
3741 {
3742 /* Any reference to any pseudo before reload is a potential
3743 reference of the frame pointer. */
3744 bitmap_set_bit (entry_block_defs, FRAME_POINTER_REGNUM);
3745 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
3746 /* If they are different, also mark the hard frame pointer as live. */
3747 if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3748 bitmap_set_bit (entry_block_defs, HARD_FRAME_POINTER_REGNUM);
3749 #endif
3750 }
3751
3752 /* These registers are live everywhere. */
3753 if (!reload_completed)
3754 {
3755 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3756 /* Pseudos with argument area equivalences may require
3757 reloading via the argument pointer. */
3758 if (fixed_regs[ARG_POINTER_REGNUM])
3759 bitmap_set_bit (entry_block_defs, ARG_POINTER_REGNUM);
3760 #endif
3761
3762 #ifdef PIC_OFFSET_TABLE_REGNUM
3763 /* Any constant, or pseudo with constant equivalences, may
3764 require reloading from memory using the pic register. */
3765 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3766 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3767 bitmap_set_bit (entry_block_defs, PIC_OFFSET_TABLE_REGNUM);
3768 #endif
3769 }
3770
3771 #ifdef INCOMING_RETURN_ADDR_RTX
3772 if (REG_P (INCOMING_RETURN_ADDR_RTX))
3773 bitmap_set_bit (entry_block_defs, REGNO (INCOMING_RETURN_ADDR_RTX));
3774 #endif
3775
3776 targetm.extra_live_on_entry (entry_block_defs);
3777 }
3778
3779
3780 /* Return the (conservative) set of hard registers that are defined on
3781 entry to the function.
3782 It uses df->entry_block_defs to determine which register
3783 reference to include. */
3784
3785 static void
3786 df_entry_block_defs_collect (struct df_collection_rec *collection_rec,
3787 bitmap entry_block_defs)
3788 {
3789 unsigned int i;
3790 bitmap_iterator bi;
3791
3792 EXECUTE_IF_SET_IN_BITMAP (entry_block_defs, 0, i, bi)
3793 {
3794 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
3795 ENTRY_BLOCK_PTR, NULL, DF_REF_REG_DEF, 0);
3796 }
3797
3798 df_canonize_collection_rec (collection_rec);
3799 }
3800
3801
3802 /* Record the (conservative) set of hard registers that are defined on
3803 entry to the function. */
3804
3805 static void
3806 df_record_entry_block_defs (bitmap entry_block_defs)
3807 {
3808 struct df_collection_rec collection_rec;
3809 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
3810 collection_rec.def_vec = VEC_alloc (df_ref, stack, FIRST_PSEUDO_REGISTER);
3811 df_entry_block_defs_collect (&collection_rec, entry_block_defs);
3812
3813 /* Process bb_refs chain. */
3814 df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (ENTRY_BLOCK), NULL);
3815 VEC_free (df_ref, stack, collection_rec.def_vec);
3816 }
3817
3818
3819 /* Update the defs in the entry block. */
3820
3821 void
3822 df_update_entry_block_defs (void)
3823 {
3824 bitmap_head refs;
3825 bool changed = false;
3826
3827 bitmap_initialize (&refs, &df_bitmap_obstack);
3828 df_get_entry_block_def_set (&refs);
3829 if (df->entry_block_defs)
3830 {
3831 if (!bitmap_equal_p (df->entry_block_defs, &refs))
3832 {
3833 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (ENTRY_BLOCK);
3834 df_ref_chain_delete_du_chain (bb_info->artificial_defs);
3835 df_ref_chain_delete (bb_info->artificial_defs);
3836 bb_info->artificial_defs = NULL;
3837 changed = true;
3838 }
3839 }
3840 else
3841 {
3842 struct df_scan_problem_data *problem_data
3843 = (struct df_scan_problem_data *) df_scan->problem_data;
3844 gcc_unreachable ();
3845 df->entry_block_defs = BITMAP_ALLOC (&problem_data->reg_bitmaps);
3846 changed = true;
3847 }
3848
3849 if (changed)
3850 {
3851 df_record_entry_block_defs (&refs);
3852 bitmap_copy (df->entry_block_defs, &refs);
3853 df_set_bb_dirty (BASIC_BLOCK (ENTRY_BLOCK));
3854 }
3855 bitmap_clear (&refs);
3856 }
3857
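/* For illustration only: the update above follows a compute/compare/rebuild
   idiom that df_update_exit_block_uses repeats below.  In sketch form, with
   compute_set and rebuild_refs as stand-ins for df_get_entry_block_def_set
   and df_record_entry_block_defs:

     bitmap_head fresh;
     bitmap_initialize (&fresh, &df_bitmap_obstack);
     compute_set (&fresh);
     if (!bitmap_equal_p (cached_set, &fresh))
       {
         delete_old_artificial_refs ();
         rebuild_refs (&fresh);
         bitmap_copy (cached_set, &fresh);
       }
     bitmap_clear (&fresh);

   cached_set and delete_old_artificial_refs are likewise stand-ins. */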
3858
3859 /* Set the bit for regs that are considered being used at the exit. */
3860
3861 static void
3862 df_get_exit_block_use_set (bitmap exit_block_uses)
3863 {
3864 unsigned int i;
3865
3866 bitmap_clear (exit_block_uses);
3867
3868 /* Stack pointer is always live at the exit. */
3869 bitmap_set_bit (exit_block_uses, STACK_POINTER_REGNUM);
3870
3871 /* Mark the frame pointer if needed at the end of the function.
3872 If we end up eliminating it, it will be removed from the live
3873 list of each basic block by reload. */
3874
3875 if ((!reload_completed) || frame_pointer_needed)
3876 {
3877 bitmap_set_bit (exit_block_uses, FRAME_POINTER_REGNUM);
3878 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
3879 /* If they are different, also mark the hard frame pointer as live. */
3880 if (!LOCAL_REGNO (HARD_FRAME_POINTER_REGNUM))
3881 bitmap_set_bit (exit_block_uses, HARD_FRAME_POINTER_REGNUM);
3882 #endif
3883 }
3884
3885 /* Many architectures have a GP register even without flag_pic.
3886 Assume the pic register is not in use, or will be handled by
3887 other means, if it is not fixed. */
3888 if (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
3889 && (unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
3890 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
3891 bitmap_set_bit (exit_block_uses, PIC_OFFSET_TABLE_REGNUM);
3892
3893 /* Mark all global registers, and all registers used by the
3894 epilogue as being live at the end of the function since they
3895 may be referenced by our caller. */
3896 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3897 if (global_regs[i] || EPILOGUE_USES (i))
3898 bitmap_set_bit (exit_block_uses, i);
3899
3900 if (HAVE_epilogue && epilogue_completed)
3901 {
3902 /* Mark all call-saved registers that we actually used. */
3903 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3904 if (df_regs_ever_live_p (i) && !LOCAL_REGNO (i)
3905 && !TEST_HARD_REG_BIT (regs_invalidated_by_call, i))
3906 bitmap_set_bit (exit_block_uses, i);
3907 }
3908
3909 #ifdef EH_RETURN_DATA_REGNO
3910 /* Mark the registers that will contain data for the handler. */
3911 if (reload_completed && crtl->calls_eh_return)
3912 for (i = 0; ; ++i)
3913 {
3914 unsigned regno = EH_RETURN_DATA_REGNO (i);
3915 if (regno == INVALID_REGNUM)
3916 break;
3917 bitmap_set_bit (exit_block_uses, regno);
3918 }
3919 #endif
3920
3921 #ifdef EH_RETURN_STACKADJ_RTX
3922 if ((!HAVE_epilogue || ! epilogue_completed)
3923 && crtl->calls_eh_return)
3924 {
3925 rtx tmp = EH_RETURN_STACKADJ_RTX;
3926 if (tmp && REG_P (tmp))
3927 df_mark_reg (tmp, exit_block_uses);
3928 }
3929 #endif
3930
3931 #ifdef EH_RETURN_HANDLER_RTX
3932 if ((!HAVE_epilogue || ! epilogue_completed)
3933 && crtl->calls_eh_return)
3934 {
3935 rtx tmp = EH_RETURN_HANDLER_RTX;
3936 if (tmp && REG_P (tmp))
3937 df_mark_reg (tmp, exit_block_uses);
3938 }
3939 #endif
3940
3941 /* Mark function return value. */
3942 diddle_return_value (df_mark_reg, (void*) exit_block_uses);
3943 }
3944
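/* For illustration only: EPILOGUE_USES, tested in the loop above, is how a
   target marks the registers its epilogue reads.  A hypothetical definition
   (FOO_LINK_REGNUM is a stand-in, not a real target macro):

     #define EPILOGUE_USES(REGNO) ((REGNO) == FOO_LINK_REGNUM)

   Every register number for which this is nonzero ends up in
   exit_block_uses. */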
3945
3946 /* Collect the refs of hard registers that are used in the exit block.
3947    It uses df->exit_block_uses to determine which registers to include. */
3948
3949 static void
3950 df_exit_block_uses_collect (struct df_collection_rec *collection_rec, bitmap exit_block_uses)
3951 {
3952 unsigned int i;
3953 bitmap_iterator bi;
3954
3955 EXECUTE_IF_SET_IN_BITMAP (exit_block_uses, 0, i, bi)
3956 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[i], NULL,
3957 EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0);
3958
3959 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3960   /* It is deliberate that this is not put in the exit block uses,
3961      but I do not know why. */
3962 if (reload_completed
3963 && !bitmap_bit_p (exit_block_uses, ARG_POINTER_REGNUM)
3964 && bb_has_eh_pred (EXIT_BLOCK_PTR)
3965 && fixed_regs[ARG_POINTER_REGNUM])
3966 df_ref_record (DF_REF_ARTIFICIAL, collection_rec, regno_reg_rtx[ARG_POINTER_REGNUM], NULL,
3967 EXIT_BLOCK_PTR, NULL, DF_REF_REG_USE, 0);
3968 #endif
3969
3970 df_canonize_collection_rec (collection_rec);
3971 }
3972
3973
3974 /* Record the set of hard registers that are used in the exit block.
3975    It uses df->exit_block_uses to determine which bits to include. */
3976
3977 static void
3978 df_record_exit_block_uses (bitmap exit_block_uses)
3979 {
3980 struct df_collection_rec collection_rec;
3981 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
3982 collection_rec.use_vec = VEC_alloc (df_ref, stack, FIRST_PSEUDO_REGISTER);
3983
3984 df_exit_block_uses_collect (&collection_rec, exit_block_uses);
3985
3986   /* Process the bb_refs chain. */
3987 df_refs_add_to_chains (&collection_rec, BASIC_BLOCK (EXIT_BLOCK), NULL);
3988 VEC_free (df_ref, stack, collection_rec.use_vec);
3989 }
3990
3991
3992 /* Update the uses in the exit block. */
3993
3994 void
3995 df_update_exit_block_uses (void)
3996 {
3997 bitmap_head refs;
3998 bool changed = false;
3999
4000 bitmap_initialize (&refs, &df_bitmap_obstack);
4001 df_get_exit_block_use_set (&refs);
4002 if (df->exit_block_uses)
4003 {
4004 if (!bitmap_equal_p (df->exit_block_uses, &refs))
4005 {
4006 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (EXIT_BLOCK);
4007 df_ref_chain_delete_du_chain (bb_info->artificial_uses);
4008 df_ref_chain_delete (bb_info->artificial_uses);
4009 bb_info->artificial_uses = NULL;
4010 changed = true;
4011 }
4012 }
4013 else
4014 {
4015 struct df_scan_problem_data *problem_data
4016 = (struct df_scan_problem_data *) df_scan->problem_data;
4017 gcc_unreachable ();
4018 df->exit_block_uses = BITMAP_ALLOC (&problem_data->reg_bitmaps);
4019 changed = true;
4020 }
4021
4022 if (changed)
4023 {
4024 df_record_exit_block_uses (&refs);
4025       bitmap_copy (df->exit_block_uses, &refs);
4026 df_set_bb_dirty (BASIC_BLOCK (EXIT_BLOCK));
4027 }
4028 bitmap_clear (&refs);
4029 }
4030
4031 static bool initialized = false;
4032
4033
4034 /* Initialize some platform-specific structures. */
4035
4036 void
4037 df_hard_reg_init (void)
4038 {
4039 #ifdef ELIMINABLE_REGS
4040 int i;
4041 static const struct {const int from, to; } eliminables[] = ELIMINABLE_REGS;
4042 #endif
4043 if (initialized)
4044 return;
4045
4046 /* Record which registers will be eliminated. We use this in
4047 mark_used_regs. */
4048 CLEAR_HARD_REG_SET (elim_reg_set);
4049
4050 #ifdef ELIMINABLE_REGS
4051 for (i = 0; i < (int) ARRAY_SIZE (eliminables); i++)
4052 SET_HARD_REG_BIT (elim_reg_set, eliminables[i].from);
4053 #else
4054 SET_HARD_REG_BIT (elim_reg_set, FRAME_POINTER_REGNUM);
4055 #endif
4056
4057 initialized = true;
4058 }
4059
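/* For illustration only: a typical target definition consumed by
   df_hard_reg_init above; the register pairs here are hypothetical and not
   taken from any particular target header:

     #define ELIMINABLE_REGS                                \
       {{ ARG_POINTER_REGNUM,   STACK_POINTER_REGNUM },     \
        { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM }}

   With this definition, elim_reg_set would contain the arg pointer and the
   frame pointer. */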
4060
4061 /* Recompute the parts of scanning that are based on regs_ever_live
4062 because something changed in that array. */
4063
4064 void
4065 df_update_entry_exit_and_calls (void)
4066 {
4067 basic_block bb;
4068
4069 df_update_entry_block_defs ();
4070 df_update_exit_block_uses ();
4071
4072 /* The call insns need to be rescanned because there may be changes
4073 in the set of registers clobbered across the call. */
4074 FOR_EACH_BB (bb)
4075 {
4076 rtx insn;
4077 FOR_BB_INSNS (bb, insn)
4078 {
4079 if (INSN_P (insn) && CALL_P (insn))
4080 df_insn_rescan (insn);
4081 }
4082 }
4083 }
4084
4085
4086 /* Return true if hard REG is actually used in some instruction.
4087    There are a fair number of conditions that affect the setting of
4088    this array.  See the comment in df.h for df->hard_regs_live_count
4089    for the conditions under which this array is set. */
4090
4091 bool
4092 df_hard_reg_used_p (unsigned int reg)
4093 {
4094 return df->hard_regs_live_count[reg] != 0;
4095 }
4096
4097
4098 /* A count of the number of times REG is actually used in some
4099    instruction.  There are a fair number of conditions that affect the
4100    setting of this array.  See the comment in df.h for
4101    df->hard_regs_live_count for the conditions under which this array
4102    is set. */
4103
4104
4105 unsigned int
4106 df_hard_reg_used_count (unsigned int reg)
4107 {
4108 return df->hard_regs_live_count[reg];
4109 }
4110
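/* For illustration only: a hypothetical caller probing for a hard register
   that no instruction uses (candidate is a stand-in variable):

     unsigned int candidate;
     for (candidate = 0; candidate < FIRST_PSEUDO_REGISTER; candidate++)
       if (!fixed_regs[candidate] && !df_hard_reg_used_p (candidate))
         break;

   This only sketches how the count is consumed; real callers apply more
   conditions (see df.h). */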
4111
4112 /* Get the value of regs_ever_live[REGNO]. */
4113
4114 bool
4115 df_regs_ever_live_p (unsigned int regno)
4116 {
4117 return regs_ever_live[regno];
4118 }
4119
4120
4121 /* Set regs_ever_live[REGNO] to VALUE.  If this causes regs_ever_live
4122    to change, schedule that change for the next update. */
4123
4124 void
4125 df_set_regs_ever_live (unsigned int regno, bool value)
4126 {
4127 if (regs_ever_live[regno] == value)
4128 return;
4129
4130 regs_ever_live[regno] = value;
4131 if (df)
4132 df->redo_entry_and_exit = true;
4133 }
4134
4135
4136 /* Compute "regs_ever_live" information from the underlying df
4137 information. Set the vector to all false if RESET. */
4138
4139 void
4140 df_compute_regs_ever_live (bool reset)
4141 {
4142 unsigned int i;
4143 bool changed = df->redo_entry_and_exit;
4144
4145 if (reset)
4146 memset (regs_ever_live, 0, sizeof (regs_ever_live));
4147
4148 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
4149 if ((!regs_ever_live[i]) && df_hard_reg_used_p (i))
4150 {
4151 regs_ever_live[i] = true;
4152 changed = true;
4153 }
4154 if (changed)
4155 df_update_entry_exit_and_calls ();
4156 df->redo_entry_and_exit = false;
4157 }
4158
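/* For illustration only: the expected calling pattern, assuming dataflow
   information is already up to date (regno is a stand-in):

     df_set_regs_ever_live (regno, true);
     df_compute_regs_ever_live (false);

   The first call schedules redo_entry_and_exit; the second notices it and
   refreshes the entry/exit sets via df_update_entry_exit_and_calls. */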
4159 \f
4160 /*----------------------------------------------------------------------------
4161 Dataflow ref information verification functions.
4162
4163 df_reg_chain_mark (refs, regno, is_def, is_eq_use)
4164 df_reg_chain_verify_unmarked (refs)
4165    df_refs_verify (VEC(df_ref,stack)*, ref*, bool)
4166 df_mws_verify (mw*, mw*, bool)
4167 df_insn_refs_verify (collection_rec, bb, insn, bool)
4168 df_bb_refs_verify (bb, refs, bool)
4169 df_bb_verify (bb)
4170 df_exit_block_bitmap_verify (bool)
4171 df_entry_block_bitmap_verify (bool)
4172 df_scan_verify ()
4173 ----------------------------------------------------------------------------*/
4174
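/* For illustration only: the verifier below is normally reached from the
   df core when internal consistency checking is enabled; an explicit call
   from a pass would simply be:

     #ifdef ENABLE_CHECKING
       df_scan_verify ();
     #endif
*/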
4175
4176 /* Mark all refs in the reg chain.  Verify that all of the refs
4177    are in the correct chain. */
4178
4179 static unsigned int
4180 df_reg_chain_mark (df_ref refs, unsigned int regno,
4181 bool is_def, bool is_eq_use)
4182 {
4183 unsigned int count = 0;
4184 df_ref ref;
4185 for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
4186 {
4187 gcc_assert (!DF_REF_IS_REG_MARKED (ref));
4188
4189 /* If there are no def-use or use-def chains, make sure that all
4190 of the chains are clear. */
4191 if (!df_chain)
4192 gcc_assert (!DF_REF_CHAIN (ref));
4193
4194 /* Check to make sure the ref is in the correct chain. */
4195 gcc_assert (DF_REF_REGNO (ref) == regno);
4196 if (is_def)
4197 gcc_assert (DF_REF_REG_DEF_P (ref));
4198 else
4199 gcc_assert (!DF_REF_REG_DEF_P (ref));
4200
4201 if (is_eq_use)
4202 gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE));
4203 else
4204 gcc_assert ((DF_REF_FLAGS (ref) & DF_REF_IN_NOTE) == 0);
4205
4206 if (DF_REF_NEXT_REG (ref))
4207 gcc_assert (DF_REF_PREV_REG (DF_REF_NEXT_REG (ref)) == ref);
4208 count++;
4209 DF_REF_REG_MARK (ref);
4210 }
4211 return count;
4212 }
4213
4214
4215 /* Verify that all of the refs in the chain are unmarked. */
4216
4217 static void
4218 df_reg_chain_verify_unmarked (df_ref refs)
4219 {
4220 df_ref ref;
4221 for (ref = refs; ref; ref = DF_REF_NEXT_REG (ref))
4222 gcc_assert (!DF_REF_IS_REG_MARKED (ref));
4223 }
4224
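/* For illustration only: the same chain-walking idiom used above, in
   minimal form, recounting the uses of a register (regno is a stand-in):

     unsigned int count = 0;
     df_ref ref;
     for (ref = DF_REG_USE_CHAIN (regno); ref; ref = DF_REF_NEXT_REG (ref))
       count++;
     gcc_assert (count == DF_REG_USE_COUNT (regno));
*/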
4225
4226 /* Verify that NEW_REC and OLD_REC have exactly the same members. */
4227
4228 static bool
4229 df_refs_verify (VEC(df_ref,stack) *new_rec, df_ref *old_rec,
4230 bool abort_if_fail)
4231 {
4232 unsigned int ix;
4233 df_ref new_ref;
4234
4235 FOR_EACH_VEC_ELT (df_ref, new_rec, ix, new_ref)
4236 {
4237 if (*old_rec == NULL || !df_ref_equal_p (new_ref, *old_rec))
4238 {
4239 if (abort_if_fail)
4240 gcc_assert (0);
4241 else
4242 return false;
4243 }
4244
4245       /* ABORT_IF_FAIL is set when called from the function-level
4246 	 verifier.  In that context, mark this ref as seen. */
4247 if (abort_if_fail)
4248 {
4249 gcc_assert (DF_REF_IS_REG_MARKED (*old_rec));
4250 DF_REF_REG_UNMARK (*old_rec);
4251 }
4252
4253 old_rec++;
4254 }
4255
4256 if (abort_if_fail)
4257 gcc_assert (*old_rec == NULL);
4258 else
4259 return *old_rec == NULL;
4260 return false;
4261 }
4262
4263
4264 /* Verify that NEW_REC and OLD_REC have exactly the same members. */
4265
4266 static bool
4267 df_mws_verify (VEC(df_mw_hardreg_ptr,stack) *new_rec,
4268 struct df_mw_hardreg **old_rec,
4269 bool abort_if_fail)
4270 {
4271 unsigned int ix;
4272 struct df_mw_hardreg *new_reg;
4273
4274 FOR_EACH_VEC_ELT (df_mw_hardreg_ptr, new_rec, ix, new_reg)
4275 {
4276 if (*old_rec == NULL || !df_mw_equal_p (new_reg, *old_rec))
4277 {
4278 if (abort_if_fail)
4279 gcc_assert (0);
4280 else
4281 return false;
4282 }
4283 old_rec++;
4284 }
4285
4286 if (abort_if_fail)
4287 gcc_assert (*old_rec == NULL);
4288 else
4289 return *old_rec == NULL;
4290 return false;
4291 }
4292
4293
4294 /* Return true if the existing insn refs information is complete and
4295    correct.  Otherwise (i.e. if there are any missing or extra refs),
4296    return false; COLLECTION_REC is filled with the correct refs.
4297
4298 If ABORT_IF_FAIL, leave the refs that are verified (already in the
4299 ref chain) as DF_REF_MARKED(). If it's false, then it's a per-insn
4300 verification mode instead of the whole function, so unmark
4301 everything.
4302
4303 If ABORT_IF_FAIL is set, this function never returns false. */
4304
4305 static bool
4306 df_insn_refs_verify (struct df_collection_rec *collection_rec,
4307 basic_block bb,
4308 rtx insn,
4309 bool abort_if_fail)
4310 {
4311 bool ret1, ret2, ret3, ret4;
4312 unsigned int uid = INSN_UID (insn);
4313 struct df_insn_info *insn_info = DF_INSN_INFO_GET (insn);
4314
4315 df_insn_refs_collect (collection_rec, bb, insn_info);
4316
4317 if (!DF_INSN_UID_DEFS (uid))
4318 {
4319 /* The insn_rec was created but it was never filled out. */
4320 if (abort_if_fail)
4321 gcc_assert (0);
4322 else
4323 return false;
4324 }
4325
4326 /* Unfortunately we cannot opt out early if one of these is not
4327 right because the marks will not get cleared. */
4328 ret1 = df_refs_verify (collection_rec->def_vec, DF_INSN_UID_DEFS (uid),
4329 abort_if_fail);
4330 ret2 = df_refs_verify (collection_rec->use_vec, DF_INSN_UID_USES (uid),
4331 abort_if_fail);
4332 ret3 = df_refs_verify (collection_rec->eq_use_vec, DF_INSN_UID_EQ_USES (uid),
4333 abort_if_fail);
4334 ret4 = df_mws_verify (collection_rec->mw_vec, DF_INSN_UID_MWS (uid),
4335 abort_if_fail);
4336 return (ret1 && ret2 && ret3 && ret4);
4337 }
4338
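/* For illustration only: with ABORT_IF_FAIL false this doubles as a cheap
   change detector; df_insn_rescan uses essentially this shape to decide
   whether an insn's cached refs must be rebuilt (collection_rec set up as
   in df_bb_verify below, rebuild_insn_refs a stand-in for the rescan
   machinery):

     if (!df_insn_refs_verify (&collection_rec, bb, insn, false))
       rebuild_insn_refs (insn);
*/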
4339
4340 /* Return true if all refs in the basic block are correct and complete.
4341    Due to df_refs_verify, it will cause the DF_REF_MARK bit to be
4342    cleared on all refs that are verified. */
4343
4344 static bool
4345 df_bb_verify (basic_block bb)
4346 {
4347 rtx insn;
4348 struct df_scan_bb_info *bb_info = df_scan_get_bb_info (bb->index);
4349 struct df_collection_rec collection_rec;
4350
4351 memset (&collection_rec, 0, sizeof (struct df_collection_rec));
4352 collection_rec.def_vec = VEC_alloc (df_ref, stack, 128);
4353 collection_rec.use_vec = VEC_alloc (df_ref, stack, 32);
4354 collection_rec.eq_use_vec = VEC_alloc (df_ref, stack, 32);
4355 collection_rec.mw_vec = VEC_alloc (df_mw_hardreg_ptr, stack, 32);
4356
4357 gcc_assert (bb_info);
4358
4359   /* Scan the block, one insn at a time, from end to beginning. */
4360 FOR_BB_INSNS_REVERSE (bb, insn)
4361 {
4362 if (!INSN_P (insn))
4363 continue;
4364 df_insn_refs_verify (&collection_rec, bb, insn, true);
4365 df_free_collection_rec (&collection_rec);
4366 }
4367
4368 /* Do the artificial defs and uses. */
4369 df_bb_refs_collect (&collection_rec, bb);
4370 df_refs_verify (collection_rec.def_vec, df_get_artificial_defs (bb->index), true);
4371 df_refs_verify (collection_rec.use_vec, df_get_artificial_uses (bb->index), true);
4372 df_free_collection_rec (&collection_rec);
4373
4374 return true;
4375 }
4376
4377
4378 /* Returns true if the entry block has a correct and complete df_ref set.
4379    If not, it either aborts (if ABORT_IF_FAIL is true) or returns false. */
4380
4381 static bool
4382 df_entry_block_bitmap_verify (bool abort_if_fail)
4383 {
4384 bitmap_head entry_block_defs;
4385 bool is_eq;
4386
4387 bitmap_initialize (&entry_block_defs, &df_bitmap_obstack);
4388 df_get_entry_block_def_set (&entry_block_defs);
4389
4390 is_eq = bitmap_equal_p (&entry_block_defs, df->entry_block_defs);
4391
4392 if (!is_eq && abort_if_fail)
4393 {
4394 print_current_pass (stderr);
4395 fprintf (stderr, "entry_block_defs = ");
4396 df_print_regset (stderr, &entry_block_defs);
4397 fprintf (stderr, "df->entry_block_defs = ");
4398 df_print_regset (stderr, df->entry_block_defs);
4399 gcc_assert (0);
4400 }
4401
4402 bitmap_clear (&entry_block_defs);
4403
4404 return is_eq;
4405 }
4406
4407
4408 /* Returns true if the exit block has a correct and complete df_ref set.
4409    If not, it either aborts (if ABORT_IF_FAIL is true) or returns false. */
4410
4411 static bool
4412 df_exit_block_bitmap_verify (bool abort_if_fail)
4413 {
4414 bitmap_head exit_block_uses;
4415 bool is_eq;
4416
4417 bitmap_initialize (&exit_block_uses, &df_bitmap_obstack);
4418 df_get_exit_block_use_set (&exit_block_uses);
4419
4420 is_eq = bitmap_equal_p (&exit_block_uses, df->exit_block_uses);
4421
4422 if (!is_eq && abort_if_fail)
4423 {
4424 print_current_pass (stderr);
4425 fprintf (stderr, "exit_block_uses = ");
4426 df_print_regset (stderr, &exit_block_uses);
4427 fprintf (stderr, "df->exit_block_uses = ");
4428 df_print_regset (stderr, df->exit_block_uses);
4429 gcc_assert (0);
4430 }
4431
4432 bitmap_clear (&exit_block_uses);
4433
4434 return is_eq;
4435 }
4436
4437
4438 /* Verify that the df_ref information for all insns in all blocks is
4439    correct and complete. */
4440
4441 void
4442 df_scan_verify (void)
4443 {
4444 unsigned int i;
4445 basic_block bb;
4446 bitmap_head regular_block_artificial_uses;
4447 bitmap_head eh_block_artificial_uses;
4448
4449 if (!df)
4450 return;
4451
4452   /* Verification is a four-step process. */
4453
4454   /* (1) All of the refs are marked by going through the reg chains. */
4455 for (i = 0; i < DF_REG_SIZE (df); i++)
4456 {
4457 gcc_assert (df_reg_chain_mark (DF_REG_DEF_CHAIN (i), i, true, false)
4458 == DF_REG_DEF_COUNT(i));
4459 gcc_assert (df_reg_chain_mark (DF_REG_USE_CHAIN (i), i, false, false)
4460 == DF_REG_USE_COUNT(i));
4461 gcc_assert (df_reg_chain_mark (DF_REG_EQ_USE_CHAIN (i), i, false, true)
4462 == DF_REG_EQ_USE_COUNT(i));
4463 }
4464
4465 /* (2) There are various bitmaps whose value may change over the
4466 course of the compilation. This step recomputes them to make
4467 sure that they have not slipped out of date. */
4468 bitmap_initialize (&regular_block_artificial_uses, &df_bitmap_obstack);
4469 bitmap_initialize (&eh_block_artificial_uses, &df_bitmap_obstack);
4470
4471 df_get_regular_block_artificial_uses (&regular_block_artificial_uses);
4472 df_get_eh_block_artificial_uses (&eh_block_artificial_uses);
4473
4474 bitmap_ior_into (&eh_block_artificial_uses,
4475 &regular_block_artificial_uses);
4476
4477   /* Check that the artificial_uses bitmaps did not change. */
4478 gcc_assert (bitmap_equal_p (&regular_block_artificial_uses,
4479 &df->regular_block_artificial_uses));
4480 gcc_assert (bitmap_equal_p (&eh_block_artificial_uses,
4481 &df->eh_block_artificial_uses));
4482
4483 bitmap_clear (&regular_block_artificial_uses);
4484 bitmap_clear (&eh_block_artificial_uses);
4485
4486   /* Verify entry block and exit block.  These only verify the bitmaps;
4487      the refs are verified in df_bb_verify. */
4488 df_entry_block_bitmap_verify (true);
4489 df_exit_block_bitmap_verify (true);
4490
4491   /* (3) All of the insns in all of the blocks are traversed and the
4492      marks are cleared both in the artificial refs attached to the
4493      blocks and the real refs inside the insns.  It is a failure to
4494      clear a mark that has not been set, as this means that the ref in
4495      the block or insn was not in the reg chain. */
4496
4497 FOR_ALL_BB (bb)
4498 df_bb_verify (bb);
4499
4500   /* (4) All reg chains are traversed a second time.  This time a
4501      check is made that the marks are clear.  A set mark would be
4502      from a ref that is not in any insn or basic block. */
4503
4504 for (i = 0; i < DF_REG_SIZE (df); i++)
4505 {
4506 df_reg_chain_verify_unmarked (DF_REG_DEF_CHAIN (i));
4507 df_reg_chain_verify_unmarked (DF_REG_USE_CHAIN (i));
4508 df_reg_chain_verify_unmarked (DF_REG_EQ_USE_CHAIN (i));
4509 }
4510 }