1 /* Calculate branch probabilities, and basic block execution counts.
2 Copyright (C) 1990, 1991, 1992, 1993, 1994, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4 Contributed by James E. Wilson, UC Berkeley/Cygnus Support;
5 based on some ideas from Dain Samples of UC Berkeley.
6 Further mangling by Bob Manson, Cygnus Support.
7
8 This file is part of GCC.
9
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 2, or (at your option) any later
13 version.
14
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 for more details.
19
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING. If not, write to the Free
22 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
23 02111-1307, USA. */
24
25 /* Generate basic block profile instrumentation and auxiliary files.
26 Profile generation is optimized, so that not all arcs in the basic
27 block graph need instrumenting. First, the BB graph is closed with
28 one entry (function start), and one exit (function exit). Any
29 ABNORMAL_EDGE cannot be instrumented (because there is no control
30 path to place the code). We close the graph by inserting fake
31 EDGE_FAKE edges to the EXIT_BLOCK, from the sources of abnormal
32 edges that do not go to the exit_block. We ignore such abnormal
33 edges. Naturally these fake edges are never directly traversed,
34 and so *cannot* be directly instrumented. Some other graph
35 massaging is done. To optimize the instrumentation, we generate the
36 minimal spanning tree of the BB graph; only edges that are not on the
37 spanning tree (plus the entry point) need instrumenting. From that information
38 all other edge counts can be deduced. By construction all fake
39 edges must be on the spanning tree. We also attempt to place
40 EDGE_CRITICAL edges on the spanning tree.
41
42 The auxiliary files generated are <dumpbase>.gcno (at compile time)
43 and <dumpbase>.gcda (at run time). The format is
44 described in full in gcov-io.h. */
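/* As a small illustration of the scheme above: for the diamond-shaped CFG

	ENTRY -> A;  A -> B;  A -> C;  B -> D;  C -> D;  D -> EXIT

   a spanning tree can contain ENTRY->A, A->B, A->C and D->EXIT (the
   conceptual EXIT->ENTRY edge is always treated as being on the tree),
   leaving only B->D and C->D to be instrumented.  If a run yields
   B->D = 30 and C->D = 70, flow conservation gives B = 30 and C = 70,
   hence A->B = 30, A->C = 70, A = D = 100 and ENTRY->A = D->EXIT = 100.
   Two counters thus recover all six edge counts.  */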
45
46 /* ??? Register allocation should use basic block execution counts to
47 give preference to the most commonly executed blocks. */
48
49 /* ??? Should calculate branch probabilities before instrumenting code, since
50 then we can use arc counts to help decide which arcs to instrument. */
51
52 #include "config.h"
53 #include "system.h"
54 #include "coretypes.h"
55 #include "tm.h"
56 #include "rtl.h"
57 #include "flags.h"
58 #include "output.h"
59 #include "regs.h"
60 #include "expr.h"
61 #include "function.h"
62 #include "toplev.h"
63 #include "coverage.h"
64 #include "value-prof.h"
65 #include "tree.h"
66 #include "cfghooks.h"
67 #include "tree-flow.h"
68
69 /* Hooks for profiling. */
70 static struct profile_hooks* profile_hooks;
71
72 /* File for profiling debug output. */
73 static inline FILE*
74 profile_dump_file (void) {
75 return profile_hooks->profile_dump_file ();
76 }
77
78 /* Additional information about the edges we need. */
79 struct edge_info {
80 unsigned int count_valid : 1;	/* The count on this edge is known.  */
81
82 /* Is on the spanning tree. */
83 unsigned int on_tree : 1;
84
85 /* Pretend this edge does not exist (it is abnormal and we've
86 inserted a fake to compensate). */
87 unsigned int ignore : 1;
88 };
89
90 struct bb_info {
91 unsigned int count_valid : 1;	/* The count for this block is known.  */
92
93 /* Number of successor and predecessor edges whose counts are still unknown. */
94 gcov_type succ_count;
95 gcov_type pred_count;
96 };
97
98 #define EDGE_INFO(e) ((struct edge_info *) (e)->aux)
99 #define BB_INFO(b) ((struct bb_info *) (b)->aux)
100
101 /* Counter summary from the last set of coverage counts read. */
102
103 const struct gcov_ctr_summary *profile_info;
104
105 /* Collect statistics on the performance of this pass for the entire source
106 file. */
107
108 static int total_num_blocks;
109 static int total_num_edges;
110 static int total_num_edges_ignored;
111 static int total_num_edges_instrumented;
112 static int total_num_blocks_created;
113 static int total_num_passes;
114 static int total_num_times_called;
115 static int total_hist_br_prob[20];
116 static int total_num_never_executed;
117 static int total_num_branches;
118
119 /* Forward declarations. */
120 static void find_spanning_tree (struct edge_list *);
121 static unsigned instrument_edges (struct edge_list *);
122 static void instrument_values (unsigned, struct histogram_value *);
123 static void compute_branch_probabilities (void);
124 static void compute_value_histograms (unsigned, struct histogram_value *);
125 static gcov_type * get_exec_counts (void);
126 static basic_block find_group (basic_block);
127 static void union_groups (basic_block, basic_block);
128
129 \f
130 /* Add edge instrumentation code to the current function.
131
132 EL is the list of all edges in the flow graph; counters are added for
133 the edges that are neither ignored nor on the spanning tree. Returns
 the number of edges instrumented. */
134
135 static unsigned
136 instrument_edges (struct edge_list *el)
137 {
138 unsigned num_instr_edges = 0;
139 int num_edges = NUM_EDGES (el);
140 basic_block bb;
141
142 remove_fake_edges ();
143
144 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
145 {
146 edge e;
147
148 for (e = bb->succ; e; e = e->succ_next)
149 {
150 struct edge_info *inf = EDGE_INFO (e);
151
152 if (!inf->ignore && !inf->on_tree)
153 {
154 if (e->flags & EDGE_ABNORMAL)
155 abort ();
156 if (dump_file)
157 fprintf (dump_file, "Edge %d to %d instrumented%s\n",
158 e->src->index, e->dest->index,
159 EDGE_CRITICAL_P (e) ? " (and split)" : "");
160 (profile_hooks->gen_edge_profiler) (num_instr_edges++, e);
161 }
162 }
163 }
164
165 total_num_blocks_created += num_edges;
166 if (dump_file)
167 fprintf (dump_file, "%d edges instrumented\n", num_instr_edges);
168 return num_instr_edges;
169 }
170
171 /* Add code to measure histograms for the N_VALUES values described in the array VALUES. */
172 static void
173 instrument_values (unsigned n_values, struct histogram_value *values)
174 {
175 unsigned i, t;
176
177 /* Emit code to generate the histograms before the insns. */
178
179 for (i = 0; i < n_values; i++)
180 {
181 switch (values[i].type)
182 {
183 case HIST_TYPE_INTERVAL:
184 t = GCOV_COUNTER_V_INTERVAL;
185 break;
186
187 case HIST_TYPE_POW2:
188 t = GCOV_COUNTER_V_POW2;
189 break;
190
191 case HIST_TYPE_SINGLE_VALUE:
192 t = GCOV_COUNTER_V_SINGLE;
193 break;
194
195 case HIST_TYPE_CONST_DELTA:
196 t = GCOV_COUNTER_V_DELTA;
197 break;
198
199 default:
200 abort ();
201 }
202 if (!coverage_counter_alloc (t, values[i].n_counters))
203 continue;
204
205 switch (values[i].type)
206 {
207 case HIST_TYPE_INTERVAL:
208 (profile_hooks->gen_interval_profiler) (values + i, t, 0);
209 break;
210
211 case HIST_TYPE_POW2:
212 (profile_hooks->gen_pow2_profiler) (values + i, t, 0);
213 break;
214
215 case HIST_TYPE_SINGLE_VALUE:
216 (profile_hooks->gen_one_value_profiler) (values + i, t, 0);
217 break;
218
219 case HIST_TYPE_CONST_DELTA:
220 (profile_hooks->gen_const_delta_profiler) (values + i, t, 0);
221 break;
222
223 default:
224 abort ();
225 }
226 }
227 }
228 \f
229
230 /* Compute the hybrid profile (arc execution counts) for all matching entries in the da file; return NULL if the counts are unavailable. */
231
232 static gcov_type *
233 get_exec_counts (void)
234 {
235 unsigned num_edges = 0;
236 basic_block bb;
237 gcov_type *counts;
238
239 /* Count the edges to be (possibly) instrumented. */
240 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
241 {
242 edge e;
243 for (e = bb->succ; e; e = e->succ_next)
244 if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
245 num_edges++;
246 }
247
248 counts = get_coverage_counts (GCOV_COUNTER_ARCS, num_edges, &profile_info);
249 if (!counts)
250 return NULL;
251
252 if (dump_file && profile_info)
253 fprintf (dump_file, "Merged %u profiles with maximal count %u.\n",
254 profile_info->runs, (unsigned) profile_info->sum_max);
255
256 return counts;
257 }
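/* Note that the walk above visits the candidate edges in the same order
   as instrument_edges and compute_branch_probabilities do: FOR_BB_BETWEEN
   from the entry block, skipping edges that are ignored or on the
   spanning tree.  That shared order is what makes indexing the returned
   array sequentially (as compute_branch_probabilities does with
   exec_counts_pos) correct.  */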
258 \f
259
260 /* Compute the branch probabilities for the various branches.
261 Annotate them accordingly. */
262
263 static void
264 compute_branch_probabilities (void)
265 {
266 basic_block bb;
267 int i;
268 int num_edges = 0;
269 int changes;
270 int passes;
271 int hist_br_prob[20];
272 int num_never_executed;
273 int num_branches;
274 gcov_type *exec_counts = get_exec_counts ();
275 int exec_counts_pos = 0;
276
277 /* Very simple sanity checks so we catch bugs in our profiling code. */
278 if (profile_info)
279 {
280 if (profile_info->run_max * profile_info->runs < profile_info->sum_max)
281 {
282 error ("corrupted profile info: run_max * runs < sum_max");
283 exec_counts = NULL;
284 }
285
286 if (profile_info->sum_all < profile_info->sum_max)
287 {
288 error ("corrupted profile info: sum_all is smaller than sum_max");
289 exec_counts = NULL;
290 }
291 }
292
293 /* Attach extra info block to each bb. */
294
295 alloc_aux_for_blocks (sizeof (struct bb_info));
296 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
297 {
298 edge e;
299
300 for (e = bb->succ; e; e = e->succ_next)
301 if (!EDGE_INFO (e)->ignore)
302 BB_INFO (bb)->succ_count++;
303 for (e = bb->pred; e; e = e->pred_next)
304 if (!EDGE_INFO (e)->ignore)
305 BB_INFO (bb)->pred_count++;
306 }
307
308 /* Avoid predicting entry on exit nodes. */
309 BB_INFO (EXIT_BLOCK_PTR)->succ_count = 2;
310 BB_INFO (ENTRY_BLOCK_PTR)->pred_count = 2;
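  /* The value 2 appears to be arbitrary; it only has to stay above one so
     that the solver below never applies flow conservation at the entry or
     exit block, where it does not hold (the conceptual EXIT->ENTRY edge is
     not present in the graph).  */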
311
312 /* For each edge not on the spanning tree, set its execution count from
313 the .da file. */
314
315 /* The first count in the .da file is the number of times that the function
316 was entered. This is the exec_count for block zero. */
317
318 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
319 {
320 edge e;
321 for (e = bb->succ; e; e = e->succ_next)
322 if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
323 {
324 num_edges++;
325 if (exec_counts)
326 {
327 e->count = exec_counts[exec_counts_pos++];
328 if (e->count > profile_info->sum_max)
329 {
330 error ("corrupted profile info: edge from %i to %i exceeds maximal count",
331 bb->index, e->dest->index);
332 }
333 }
334 else
335 e->count = 0;
336
337 EDGE_INFO (e)->count_valid = 1;
338 BB_INFO (bb)->succ_count--;
339 BB_INFO (e->dest)->pred_count--;
340 if (dump_file)
341 {
342 fprintf (dump_file, "\nRead edge from %i to %i, count:",
343 bb->index, e->dest->index);
344 fprintf (dump_file, HOST_WIDEST_INT_PRINT_DEC,
345 (HOST_WIDEST_INT) e->count);
346 }
347 }
348 }
349
350 if (dump_file)
351 fprintf (dump_file, "\n%d edge counts read\n", num_edges);
352
353 /* For every block in the file,
354 - if every exit/entrance edge has a known count, then set the block count
355 - if the block count is known, and every exit/entrance edge but one has
356 a known execution count, then set the count of the remaining edge
357
358 As edge counts are set, decrement the succ/pred count, but don't delete
359 the edge; that way we can easily tell when all edges are known, or when
360 only one edge is unknown. */
361
362 /* The order that the basic blocks are iterated through is important.
363 Since the code that finds spanning trees starts with block 0, low numbered
364 edges are put on the spanning tree in preference to high numbered edges.
365 Hence, most instrumented edges are at the end. Graph solving works much
366 faster if we propagate numbers from the end to the start.
367
368 This takes an average of slightly more than 3 passes. */
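/* Termination is guaranteed: each successful step marks one block count
   or one edge count valid, so at most O(blocks + edges) changes can ever
   happen, and a pass that makes no change clears CHANGES and ends the
   loop.  If the profile data is consistent, the sanity check below (all
   remaining succ/pred counts zero) should then succeed.  */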
369
370 changes = 1;
371 passes = 0;
372 while (changes)
373 {
374 passes++;
375 changes = 0;
376 FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR, NULL, prev_bb)
377 {
378 struct bb_info *bi = BB_INFO (bb);
379 if (! bi->count_valid)
380 {
381 if (bi->succ_count == 0)
382 {
383 edge e;
384 gcov_type total = 0;
385
386 for (e = bb->succ; e; e = e->succ_next)
387 total += e->count;
388 bb->count = total;
389 bi->count_valid = 1;
390 changes = 1;
391 }
392 else if (bi->pred_count == 0)
393 {
394 edge e;
395 gcov_type total = 0;
396
397 for (e = bb->pred; e; e = e->pred_next)
398 total += e->count;
399 bb->count = total;
400 bi->count_valid = 1;
401 changes = 1;
402 }
403 }
404 if (bi->count_valid)
405 {
406 if (bi->succ_count == 1)
407 {
408 edge e;
409 gcov_type total = 0;
410
411 /* One of the counts will be invalid, but it is zero,
412 so adding it in also doesn't hurt. */
413 for (e = bb->succ; e; e = e->succ_next)
414 total += e->count;
415
416 /* Search for the invalid edge, and set its count. */
417 for (e = bb->succ; e; e = e->succ_next)
418 if (! EDGE_INFO (e)->count_valid && ! EDGE_INFO (e)->ignore)
419 break;
420
421 /* Calculate count for remaining edge by conservation. */
422 total = bb->count - total;
423
424 if (! e)
425 abort ();
426 EDGE_INFO (e)->count_valid = 1;
427 e->count = total;
428 bi->succ_count--;
429
430 BB_INFO (e->dest)->pred_count--;
431 changes = 1;
432 }
433 if (bi->pred_count == 1)
434 {
435 edge e;
436 gcov_type total = 0;
437
438 /* One of the counts will be invalid, but it is zero,
439 so adding it in also doesn't hurt. */
440 for (e = bb->pred; e; e = e->pred_next)
441 total += e->count;
442
443 /* Search for the invalid edge, and set its count. */
444 for (e = bb->pred; e; e = e->pred_next)
445 if (!EDGE_INFO (e)->count_valid && !EDGE_INFO (e)->ignore)
446 break;
447
448 /* Calculate count for remaining edge by conservation. */
449 total = bb->count - total + e->count;
450
451 if (! e)
452 abort ();
453 EDGE_INFO (e)->count_valid = 1;
454 e->count = total;
455 bi->pred_count--;
456
457 BB_INFO (e->src)->succ_count--;
458 changes = 1;
459 }
460 }
461 }
462 }
463 if (dump_file)
464 dump_flow_info (dump_file);
465
466 total_num_passes += passes;
467 if (dump_file)
468 fprintf (dump_file, "Graph solving took %d passes.\n\n", passes);
469
470 /* If the graph has been correctly solved, every block will have a
471 succ and pred count of zero. */
472 FOR_EACH_BB (bb)
473 {
474 if (BB_INFO (bb)->succ_count || BB_INFO (bb)->pred_count)
475 abort ();
476 }
477
478 /* For every edge, calculate its branch probability and add a reg_note
479 to the branch insn to indicate this. */
480
481 for (i = 0; i < 20; i++)
482 hist_br_prob[i] = 0;
483 num_never_executed = 0;
484 num_branches = 0;
485
486 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
487 {
488 edge e;
489 rtx note;
490
491 if (bb->count < 0)
492 {
493 error ("corrupted profile info: number of iterations for basic block %d thought to be %i",
494 bb->index, (int)bb->count);
495 bb->count = 0;
496 }
497 for (e = bb->succ; e; e = e->succ_next)
498 {
499 /* The function may return twice in the case the called function is
500 setjmp or calls fork, but we can't represent this by an extra
501 edge from the entry, since an extra edge from the exit is
502 already present. We get a negative frequency from the entry
503 point. */
504 if ((e->count < 0
505 && e->dest == EXIT_BLOCK_PTR)
506 || (e->count > bb->count
507 && e->dest != EXIT_BLOCK_PTR))
508 {
509 if (block_ends_with_call_p (bb))
510 e->count = e->count < 0 ? 0 : bb->count;
511 }
512 if (e->count < 0 || e->count > bb->count)
513 {
514 error ("corrupted profile info: number of executions for edge %d-%d thought to be %i",
515 e->src->index, e->dest->index,
516 (int)e->count);
517 e->count = bb->count / 2;
518 }
519 }
520 if (bb->count)
521 {
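	  /* Scale each edge count to a probability in fixed-point
	     REG_BR_PROB_BASE units; adding bb->count / 2 before the
	     division rounds to the nearest unit instead of truncating.  */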
522 for (e = bb->succ; e; e = e->succ_next)
523 e->probability = (e->count * REG_BR_PROB_BASE + bb->count / 2) / bb->count;
524 if (bb->index >= 0
525 && block_ends_with_condjump_p (bb)
526 && bb->succ->succ_next)
527 {
528 int prob;
529 edge e;
530 int index;
531
532 /* Find the branch edge. It is possible that we do have fake
533 edges here. */
534 for (e = bb->succ; e->flags & (EDGE_FAKE | EDGE_FALLTHRU);
535 e = e->succ_next)
536 continue; /* Loop body has been intentionally left blank. */
537
538 prob = e->probability;
539 index = prob * 20 / REG_BR_PROB_BASE;
540
541 if (index == 20)
542 index = 19;
543 hist_br_prob[index]++;
544
545 /* Do this for RTL only. */
546 if (!ir_type ())
547 {
548 note = find_reg_note (BB_END (bb), REG_BR_PROB, 0);
549 /* There may already be a note put there by some other pass, such
550 as the builtin_expect expander. */
551 if (note)
552 XEXP (note, 0) = GEN_INT (prob);
553 else
554 REG_NOTES (BB_END (bb))
555 = gen_rtx_EXPR_LIST (REG_BR_PROB, GEN_INT (prob),
556 REG_NOTES (BB_END (bb)));
557 }
558 num_branches++;
559 }
560 }
561 /* Otherwise distribute the probabilities evenly so we get a sane
562 sum. Use the simple heuristic that if there are normal edges,
563 all abnormal edges get a frequency of 0; otherwise distribute the
564 frequency over the abnormal edges (this is the case of noreturn
565 calls). */
566 else
567 {
568 int total = 0;
569
570 for (e = bb->succ; e; e = e->succ_next)
571 if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
572 total ++;
573 if (total)
574 {
575 for (e = bb->succ; e; e = e->succ_next)
576 if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
577 e->probability = REG_BR_PROB_BASE / total;
578 else
579 e->probability = 0;
580 }
581 else
582 {
583 for (e = bb->succ; e; e = e->succ_next)
584 total ++;
585 for (e = bb->succ; e; e = e->succ_next)
586 e->probability = REG_BR_PROB_BASE / total;
587 }
588 if (bb->index >= 0
589 && block_ends_with_condjump_p (bb)
590 && bb->succ->succ_next)
591 num_branches++, num_never_executed++;
592 }
593 }
594
595 if (dump_file)
596 {
597 fprintf (dump_file, "%d branches\n", num_branches);
598 fprintf (dump_file, "%d branches never executed\n",
599 num_never_executed);
600 if (num_branches)
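	/* Buckets i and 19-i are reported together, presumably so that a
	   branch taken with probability p is counted in the same 5% range
	   as one taken with probability 1 - p.  */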
601 for (i = 0; i < 10; i++)
602 fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
603 (hist_br_prob[i] + hist_br_prob[19-i]) * 100 / num_branches,
604 5 * i, 5 * i + 5);
605
606 total_num_branches += num_branches;
607 total_num_never_executed += num_never_executed;
608 for (i = 0; i < 20; i++)
609 total_hist_br_prob[i] += hist_br_prob[i];
610
611 fputc ('\n', dump_file);
612 fputc ('\n', dump_file);
613 }
614
615 free_aux_for_blocks ();
616 }
617
618 /* Load value histograms for N_VALUES values whose description is stored
619 in VALUES array from .da file. */
620 static void
621 compute_value_histograms (unsigned n_values, struct histogram_value *values)
622 {
623 unsigned i, j, t, any;
624 unsigned n_histogram_counters[GCOV_N_VALUE_COUNTERS];
625 gcov_type *histogram_counts[GCOV_N_VALUE_COUNTERS];
626 gcov_type *act_count[GCOV_N_VALUE_COUNTERS];
627 gcov_type *aact_count;
628
629 for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
630 n_histogram_counters[t] = 0;
631
632 for (i = 0; i < n_values; i++)
633 n_histogram_counters[(int) (values[i].type)] += values[i].n_counters;
634
635 any = 0;
636 for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
637 {
638 if (!n_histogram_counters[t])
639 {
640 histogram_counts[t] = NULL;
641 continue;
642 }
643
644 histogram_counts[t] =
645 get_coverage_counts (COUNTER_FOR_HIST_TYPE (t),
646 n_histogram_counters[t], NULL);
647 if (histogram_counts[t])
648 any = 1;
649 act_count[t] = histogram_counts[t];
650 }
651 if (!any)
652 return;
653
654 for (i = 0; i < n_values; i++)
655 {
656 rtx hist_list = NULL_RTX;
657 t = (int) (values[i].type);
658
659 /* FIXME: make this work for trees. */
660 if (!ir_type ())
661 {
662 aact_count = act_count[t];
663 act_count[t] += values[i].n_counters;
664 for (j = values[i].n_counters; j > 0; j--)
665 hist_list = alloc_EXPR_LIST (0, GEN_INT (aact_count[j - 1]),
666 hist_list);
667 hist_list = alloc_EXPR_LIST (0,
668 copy_rtx ((rtx)values[i].value), hist_list);
669 hist_list = alloc_EXPR_LIST (0, GEN_INT (values[i].type), hist_list);
670 REG_NOTES ((rtx)values[i].insn) =
671 alloc_EXPR_LIST (REG_VALUE_PROFILE, hist_list,
672 REG_NOTES ((rtx)values[i].insn));
673 }
674 }
675
676 for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
677 if (histogram_counts[t])
678 free (histogram_counts[t]);
679 }
680
681 /* Instrument and/or analyze program behavior based on program flow graph.
682 In either case, this function builds a flow graph for the function being
683 compiled. The flow graph is stored in BB_GRAPH.
684
685 When FLAG_PROFILE_ARCS is nonzero, this function instruments the edges in
686 the flow graph that are needed to reconstruct the dynamic behavior of the
687 flow graph.
688
689 When FLAG_BRANCH_PROBABILITIES is nonzero, this function reads auxiliary
690 information from a data file containing edge count information from previous
691 executions of the function being compiled. In this case, the flow graph is
692 annotated with actual execution counts, which are later propagated into the
693 rtl for optimization purposes.
694
695 Main entry point of this file. */
696
697 void
698 branch_prob (void)
699 {
700 basic_block bb;
701 unsigned i;
702 unsigned num_edges, ignored_edges;
703 unsigned num_instrumented;
704 struct edge_list *el;
705 unsigned n_values = 0;
706 struct histogram_value *values = NULL;
707
708 total_num_times_called++;
709
710 flow_call_edges_add (NULL);
711 add_noreturn_fake_exit_edges ();
712
713 /* We can't handle cyclic regions constructed using abnormal edges.
714 To avoid these, we replace every source of an abnormal edge by a
715 fake edge from the entry node, and every destination by a fake edge
716 to the exit. This keeps the graph acyclic and our calculation exact
717 for all normal edges except for the exit and entrance ones.
718
719 We also add fake exit edges for each call and asm statement in the
720 basic block, since it may not return. */
721
722 FOR_EACH_BB (bb)
723 {
724 int need_exit_edge = 0, need_entry_edge = 0;
725 int have_exit_edge = 0, have_entry_edge = 0;
726 edge e;
727
728 /* Functions returning multiple times are not handled by extra edges.
729 Instead we simply allow negative counts on edges from the exit to
730 the block past the call, and corresponding probabilities. We can't
731 go with the extra edges because that would result in a flowgraph
732 that needs to have fake edges outside the spanning tree. */
733
734 for (e = bb->succ; e; e = e->succ_next)
735 {
736 if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
737 && e->dest != EXIT_BLOCK_PTR)
738 need_exit_edge = 1;
739 if (e->dest == EXIT_BLOCK_PTR)
740 have_exit_edge = 1;
741 }
742 for (e = bb->pred; e; e = e->pred_next)
743 {
744 if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
745 && e->src != ENTRY_BLOCK_PTR)
746 need_entry_edge = 1;
747 if (e->src == ENTRY_BLOCK_PTR)
748 have_entry_edge = 1;
749 }
750
751 if (need_exit_edge && !have_exit_edge)
752 {
753 if (dump_file)
754 fprintf (dump_file, "Adding fake exit edge to bb %i\n",
755 bb->index);
756 make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
757 }
758 if (need_entry_edge && !have_entry_edge)
759 {
760 if (dump_file)
761 fprintf (dump_file, "Adding fake entry edge to bb %i\n",
762 bb->index);
763 make_edge (ENTRY_BLOCK_PTR, bb, EDGE_FAKE);
764 }
765 }
766
767 el = create_edge_list ();
768 num_edges = NUM_EDGES (el);
769 alloc_aux_for_edges (sizeof (struct edge_info));
770
771 /* The basic blocks are expected to be numbered sequentially. */
772 compact_blocks ();
773
774 ignored_edges = 0;
775 for (i = 0 ; i < num_edges ; i++)
776 {
777 edge e = INDEX_EDGE (el, i);
778 e->count = 0;
779
780 /* Mark edges we've replaced by fake edges above as ignored. */
781 if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
782 && e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR)
783 {
784 EDGE_INFO (e)->ignore = 1;
785 ignored_edges++;
786 }
787 }
788
789 #ifdef ENABLE_CHECKING
790 verify_flow_info ();
791 #endif
792
793 /* Create spanning tree from basic block graph, mark each edge that is
794 on the spanning tree. We insert as many abnormal and critical edges
795 as possible to minimize number of edge splits necessary. */
796
797 find_spanning_tree (el);
798
799 /* Fake edges that are not on the tree will not be instrumented, so
800 mark them ignored. */
801 for (num_instrumented = i = 0; i < num_edges; i++)
802 {
803 edge e = INDEX_EDGE (el, i);
804 struct edge_info *inf = EDGE_INFO (e);
805
806 if (inf->ignore || inf->on_tree)
807 /*NOP*/;
808 else if (e->flags & EDGE_FAKE)
809 {
810 inf->ignore = 1;
811 ignored_edges++;
812 }
813 else
814 num_instrumented++;
815 }
816
817 total_num_blocks += n_basic_blocks + 2;
818 if (dump_file)
819 fprintf (dump_file, "%d basic blocks\n", n_basic_blocks);
820
821 total_num_edges += num_edges;
822 if (dump_file)
823 fprintf (dump_file, "%d edges\n", num_edges);
824
825 total_num_edges_ignored += ignored_edges;
826 if (dump_file)
827 fprintf (dump_file, "%d ignored edges\n", ignored_edges);
828
829 /* Write the data from which gcov can reconstruct the basic block
830 graph. */
831
832 /* Basic block flags */
833 if (coverage_begin_output ())
834 {
835 gcov_position_t offset;
836
837 offset = gcov_write_tag (GCOV_TAG_BLOCKS);
838 for (i = 0; i != (unsigned) (n_basic_blocks + 2); i++)
839 gcov_write_unsigned (0);
840 gcov_write_length (offset);
841 }
842
843 /* Keep all basic block indexes nonnegative in the gcov output.
844 Index 0 is used for the entry block, the last index is for the
845 exit block. */
846 ENTRY_BLOCK_PTR->index = -1;
847 EXIT_BLOCK_PTR->index = last_basic_block;
848 #define BB_TO_GCOV_INDEX(bb) ((bb)->index + 1)
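/* With the index overrides just above, BB_TO_GCOV_INDEX maps the entry
   block (index -1) to gcov index 0, the ordinary blocks to 1 .. n, and
   the exit block (index last_basic_block) to the last gcov index, which
   is the layout described in the preceding comment.  */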
849
850 /* Arcs */
851 if (coverage_begin_output ())
852 {
853 gcov_position_t offset;
854
855 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
856 {
857 edge e;
858
859 offset = gcov_write_tag (GCOV_TAG_ARCS);
860 gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));
861
862 for (e = bb->succ; e; e = e->succ_next)
863 {
864 struct edge_info *i = EDGE_INFO (e);
865 if (!i->ignore)
866 {
867 unsigned flag_bits = 0;
868
869 if (i->on_tree)
870 flag_bits |= GCOV_ARC_ON_TREE;
871 if (e->flags & EDGE_FAKE)
872 flag_bits |= GCOV_ARC_FAKE;
873 if (e->flags & EDGE_FALLTHRU)
874 flag_bits |= GCOV_ARC_FALLTHROUGH;
875
876 gcov_write_unsigned (BB_TO_GCOV_INDEX (e->dest));
877 gcov_write_unsigned (flag_bits);
878 }
879 }
880
881 gcov_write_length (offset);
882 }
883 }
884
885 /* Line numbers. */
886 /* FIXME: make this work for trees. (Line numbers are in location_t
887 objects, but aren't always attached to the obvious tree...) */
888 if (coverage_begin_output () && !ir_type ())
889 {
890 char const *prev_file_name = NULL;
891 gcov_position_t offset;
892
893 FOR_EACH_BB (bb)
894 {
895 rtx insn = BB_HEAD (bb);
896 int ignore_next_note = 0;
897
898 offset = 0;
899
900 /* We are looking for line number notes. Search backward
901 before basic block to find correct ones. */
902 insn = prev_nonnote_insn (insn);
903 if (!insn)
904 insn = get_insns ();
905 else
906 insn = NEXT_INSN (insn);
907
908 while (insn != BB_END (bb))
909 {
910 if (GET_CODE (insn) == NOTE)
911 {
912 /* Must ignore the line number notes that
913 immediately follow the end of an inline function
914 to avoid counting it twice. There is a note
915 before the call, and one after the call. */
916 if (NOTE_LINE_NUMBER (insn)
917 == NOTE_INSN_REPEATED_LINE_NUMBER)
918 ignore_next_note = 1;
919 else if (NOTE_LINE_NUMBER (insn) <= 0)
920 /*NOP*/;
921 else if (ignore_next_note)
922 ignore_next_note = 0;
923 else
924 {
925 if (!offset)
926 {
927 offset = gcov_write_tag (GCOV_TAG_LINES);
928 gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));
929 }
930
931 /* If this is a new source file, then output the
932 file's name to the .bb file. */
933 if (!prev_file_name
934 || strcmp (NOTE_SOURCE_FILE (insn),
935 prev_file_name))
936 {
937 prev_file_name = NOTE_SOURCE_FILE (insn);
938 gcov_write_unsigned (0);
939 gcov_write_string (prev_file_name);
940 }
941 gcov_write_unsigned (NOTE_LINE_NUMBER (insn));
942 }
943 }
944 insn = NEXT_INSN (insn);
945 }
946
947 if (offset)
948 {
949 /* A file name of NULL indicates the end of the run. */
950 gcov_write_unsigned (0);
951 gcov_write_string (NULL);
952 gcov_write_length (offset);
953 }
954 }
955 }
956
957 ENTRY_BLOCK_PTR->index = ENTRY_BLOCK;
958 EXIT_BLOCK_PTR->index = EXIT_BLOCK;
959 #undef BB_TO_GCOV_INDEX
960
961 if (flag_profile_values)
962 find_values_to_profile (&n_values, &values);
963
964 if (flag_branch_probabilities)
965 {
966 compute_branch_probabilities ();
967 if (flag_profile_values)
968 compute_value_histograms (n_values, values);
969 }
970
971 /* For each edge not on the spanning tree, add counting code. */
972 if (profile_arc_flag
973 && coverage_counter_alloc (GCOV_COUNTER_ARCS, num_instrumented))
974 {
975 unsigned n_instrumented = instrument_edges (el);
976
977 if (n_instrumented != num_instrumented)
978 abort ();
979
980 if (flag_profile_values)
981 instrument_values (n_values, values);
982
983 /* Commit changes done by instrumentation. */
984 if (ir_type ())
985 bsi_commit_edge_inserts ((int *)NULL);
986 else
987 {
988 commit_edge_insertions_watch_calls ();
989 allocate_reg_info (max_reg_num (), FALSE, FALSE);
990 }
991 }
992
993 remove_fake_edges ();
994 free_aux_for_edges ();
995
996 if (!ir_type ())
997 {
998 /* Re-merge split basic blocks and the mess introduced by
999 insert_insn_on_edge. */
1000 cleanup_cfg (profile_arc_flag ? CLEANUP_EXPENSIVE : 0);
1001 if (profile_dump_file ())
1002 dump_flow_info (profile_dump_file ());
1003 }
1004
1005 free_edge_list (el);
1006 }
1007 \f
1008 /* Union find algorithm implementation for the basic blocks using
1009 aux fields. */
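/* Each block's aux field points at another block of the same group; a
   block whose aux points at itself is the representative of its group.
   find_group follows this chain to the representative and then shortens
   the chain (path compression), so repeated lookups stay cheap even
   though no rank field is kept (see the ??? note in union_groups).  */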
1010
1011 static basic_block
1012 find_group (basic_block bb)
1013 {
1014 basic_block group = bb, bb1;
1015
1016 while ((basic_block) group->aux != group)
1017 group = (basic_block) group->aux;
1018
1019 /* Compress path. */
1020 while ((basic_block) bb->aux != group)
1021 {
1022 bb1 = (basic_block) bb->aux;
1023 bb->aux = (void *) group;
1024 bb = bb1;
1025 }
1026 return group;
1027 }
1028
1029 static void
1030 union_groups (basic_block bb1, basic_block bb2)
1031 {
1032 basic_block bb1g = find_group (bb1);
1033 basic_block bb2g = find_group (bb2);
1034
1035 /* ??? I don't have a place for the rank field. OK, let's go without it;
1036 this code is unlikely to be a performance problem anyway. */
1037 if (bb1g == bb2g)
1038 abort ();
1039
1040 bb1g->aux = bb2g;
1041 }
1042 \f
1043 /* This function searches all of the edges in the program flow graph, and puts
1044 as many bad edges as possible onto the spanning tree. Bad edges include
1045 abnormal edges, which can't be instrumented at the moment. Since it is
1046 possible for fake edges to form a cycle, we will have to develop some
1047 better way in the future. Critical edges are also put on the tree, since
1048 they are more expensive to instrument. */
1049
1050 static void
1051 find_spanning_tree (struct edge_list *el)
1052 {
1053 int i;
1054 int num_edges = NUM_EDGES (el);
1055 basic_block bb;
1056
1057 /* We use aux field for standard union-find algorithm. */
1058 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1059 bb->aux = bb;
1060
1061 /* Add the fake edge from exit to entry, which we can't instrument. */
1062 union_groups (EXIT_BLOCK_PTR, ENTRY_BLOCK_PTR);
1063
1064 /* First add all abnormal edges to the tree unless they form a cycle. Also
1065 add all edges to EXIT_BLOCK_PTR, to avoid inserting profiling code after
1066 the return value of the function has been set. */
1067 for (i = 0; i < num_edges; i++)
1068 {
1069 edge e = INDEX_EDGE (el, i);
1070 if (((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL | EDGE_FAKE))
1071 || e->dest == EXIT_BLOCK_PTR)
1072 && !EDGE_INFO (e)->ignore
1073 && (find_group (e->src) != find_group (e->dest)))
1074 {
1075 if (dump_file)
1076 fprintf (dump_file, "Abnormal edge %d to %d put to tree\n",
1077 e->src->index, e->dest->index);
1078 EDGE_INFO (e)->on_tree = 1;
1079 union_groups (e->src, e->dest);
1080 }
1081 }
1082
1083 /* Now insert all critical edges to the tree unless they form a cycle. */
1084 for (i = 0; i < num_edges; i++)
1085 {
1086 edge e = INDEX_EDGE (el, i);
1087 if (EDGE_CRITICAL_P (e) && !EDGE_INFO (e)->ignore
1088 && find_group (e->src) != find_group (e->dest))
1089 {
1090 if (dump_file)
1091 fprintf (dump_file, "Critical edge %d to %d put to tree\n",
1092 e->src->index, e->dest->index);
1093 EDGE_INFO (e)->on_tree = 1;
1094 union_groups (e->src, e->dest);
1095 }
1096 }
1097
1098 /* And now the rest. */
1099 for (i = 0; i < num_edges; i++)
1100 {
1101 edge e = INDEX_EDGE (el, i);
1102 if (!EDGE_INFO (e)->ignore
1103 && find_group (e->src) != find_group (e->dest))
1104 {
1105 if (dump_file)
1106 fprintf (dump_file, "Normal edge %d to %d put to tree\n",
1107 e->src->index, e->dest->index);
1108 EDGE_INFO (e)->on_tree = 1;
1109 union_groups (e->src, e->dest);
1110 }
1111 }
1112
1113 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1114 bb->aux = NULL;
1115 }
1116 \f
1117 /* Perform file-level initialization for branch-prob processing. */
1118
1119 void
1120 init_branch_prob (void)
1121 {
1122 int i;
1123
1124 total_num_blocks = 0;
1125 total_num_edges = 0;
1126 total_num_edges_ignored = 0;
1127 total_num_edges_instrumented = 0;
1128 total_num_blocks_created = 0;
1129 total_num_passes = 0;
1130 total_num_times_called = 0;
1131 total_num_branches = 0;
1132 total_num_never_executed = 0;
1133 for (i = 0; i < 20; i++)
1134 total_hist_br_prob[i] = 0;
1135 }
1136
1137 /* Perform file-level cleanup after branch-prob processing
1138 is completed. */
1139
1140 void
1141 end_branch_prob (void)
1142 {
1143 if (dump_file)
1144 {
1145 fprintf (dump_file, "\n");
1146 fprintf (dump_file, "Total number of blocks: %d\n",
1147 total_num_blocks);
1148 fprintf (dump_file, "Total number of edges: %d\n", total_num_edges);
1149 fprintf (dump_file, "Total number of ignored edges: %d\n",
1150 total_num_edges_ignored);
1151 fprintf (dump_file, "Total number of instrumented edges: %d\n",
1152 total_num_edges_instrumented);
1153 fprintf (dump_file, "Total number of blocks created: %d\n",
1154 total_num_blocks_created);
1155 fprintf (dump_file, "Total number of graph solution passes: %d\n",
1156 total_num_passes);
1157 if (total_num_times_called != 0)
1158 fprintf (dump_file, "Average number of graph solution passes: %d\n",
1159 (total_num_passes + (total_num_times_called >> 1))
1160 / total_num_times_called);
1161 fprintf (dump_file, "Total number of branches: %d\n",
1162 total_num_branches);
1163 fprintf (dump_file, "Total number of branches never executed: %d\n",
1164 total_num_never_executed);
1165 if (total_num_branches)
1166 {
1167 int i;
1168
1169 for (i = 0; i < 10; i++)
1170 fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
1171 (total_hist_br_prob[i] + total_hist_br_prob[19-i]) * 100
1172 / total_num_branches, 5*i, 5*i+5);
1173 }
1174 }
1175 }
1176
1177 /* Set up hooks to enable tree-based profiling. */
1178
1179 void
1180 tree_register_profile_hooks (void)
1181 {
1182 profile_hooks = &tree_profile_hooks;
1183 if (!ir_type ())
1184 abort ();
1185 }
1186
1187 /* Set up hooks to enable RTL-based profiling. */
1188
1189 void
1190 rtl_register_profile_hooks (void)
1191 {
1192 profile_hooks = &rtl_profile_hooks;
1193 if (ir_type ())
1194 abort ();
1195 }