]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/profile.c
vec.h: Update API to separate allocation mechanism from type.
[thirdparty/gcc.git] / gcc / profile.c
1 /* Calculate branch probabilities, and basic block execution counts.
2 Copyright (C) 1990, 1991, 1992, 1993, 1994, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
4 Contributed by James E. Wilson, UC Berkeley/Cygnus Support;
5 based on some ideas from Dain Samples of UC Berkeley.
6 Further mangling by Bob Manson, Cygnus Support.
7
8 This file is part of GCC.
9
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 2, or (at your option) any later
13 version.
14
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 for more details.
19
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING. If not, write to the Free
22 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
23 02111-1307, USA. */
24
25 /* Generate basic block profile instrumentation and auxiliary files.
26 Profile generation is optimized, so that not all arcs in the basic
27 block graph need instrumenting. First, the BB graph is closed with
28 one entry (function start), and one exit (function exit). Any
29 ABNORMAL_EDGE cannot be instrumented (because there is no control
30 path to place the code). We close the graph by inserting fake
31 EDGE_FAKE edges to the EXIT_BLOCK, from the sources of abnormal
32 edges that do not go to the exit_block. We ignore such abnormal
33 edges. Naturally these fake edges are never directly traversed,
34 and so *cannot* be directly instrumented. Some other graph
35 massaging is done. To optimize the instrumentation we generate the
36 BB minimal span tree, only edges that are not on the span tree
37 (plus the entry point) need instrumenting. From that information
38 all other edge counts can be deduced. By construction all fake
39 edges must be on the spanning tree. We also attempt to place
40 EDGE_CRITICAL edges on the spanning tree.
41
42 The auxiliary files generated are <dumpbase>.gcno (at compile time)
43 and <dumpbase>.gcda (at run time). The format is
44 described in full in gcov-io.h. */
45
46 /* ??? Register allocation should use basic block execution counts to
47 give preference to the most commonly executed blocks. */
48
49 /* ??? Should calculate branch probabilities before instrumenting code, since
50 then we can use arc counts to help decide which arcs to instrument. */
51
52 #include "config.h"
53 #include "system.h"
54 #include "coretypes.h"
55 #include "tm.h"
56 #include "rtl.h"
57 #include "flags.h"
58 #include "output.h"
59 #include "regs.h"
60 #include "expr.h"
61 #include "function.h"
62 #include "toplev.h"
63 #include "coverage.h"
64 #include "value-prof.h"
65 #include "tree.h"
66 #include "cfghooks.h"
67 #include "tree-flow.h"
68
69 /* Hooks for profiling. */
70 static struct profile_hooks* profile_hooks;
71
72 /* File for profiling debug output. */
73 static inline FILE*
74 profile_dump_file (void) {
75 return profile_hooks->profile_dump_file ();
76 }
77
/* Additional information about the edges we need.  */
struct edge_info {
  /* Nonzero once this edge's execution count is known, either read
     directly from the .gcda data or derived by the count solver.  */
  unsigned int count_valid : 1;

  /* Is on the spanning tree.  */
  unsigned int on_tree : 1;

  /* Pretend this edge does not exist (it is abnormal and we've
     inserted a fake to compensate).  */
  unsigned int ignore : 1;
};
89
/* Per-basic-block scratch data used while solving the edge counts.  */
struct bb_info {
  /* Nonzero once this block's execution count is known.  */
  unsigned int count_valid : 1;

  /* Number of successor and predecessor edges (excluding ignored
     ones) whose counts are still unknown; decremented as counts are
     determined.  */
  gcov_type succ_count;
  gcov_type pred_count;
};
97
98 #define EDGE_INFO(e) ((struct edge_info *) (e)->aux)
99 #define BB_INFO(b) ((struct bb_info *) (b)->aux)
100
101 /* Counter summary from the last set of coverage counts read. */
102
103 const struct gcov_ctr_summary *profile_info;
104
105 /* Collect statistics on the performance of this pass for the entire source
106 file. */
107
108 static int total_num_blocks;
109 static int total_num_edges;
110 static int total_num_edges_ignored;
111 static int total_num_edges_instrumented;
112 static int total_num_blocks_created;
113 static int total_num_passes;
114 static int total_num_times_called;
115 static int total_hist_br_prob[20];
116 static int total_num_never_executed;
117 static int total_num_branches;
118
119 /* Forward declarations. */
120 static void find_spanning_tree (struct edge_list *);
121 static unsigned instrument_edges (struct edge_list *);
122 static void instrument_values (histogram_values);
123 static void compute_branch_probabilities (void);
124 static void compute_value_histograms (histogram_values);
125 static gcov_type * get_exec_counts (void);
126 static basic_block find_group (basic_block);
127 static void union_groups (basic_block, basic_block);
128
129 \f
/* Add edge instrumentation code to the current function.

   EL is the list of all edges in the function's flow graph.  Only
   edges that are neither ignored nor on the spanning tree receive a
   counter; every other edge count can be reconstructed from these.
   Returns the number of edges instrumented.  */

static unsigned
instrument_edges (struct edge_list *el)
{
  unsigned num_instr_edges = 0;
  int num_edges = NUM_EDGES (el);
  basic_block bb;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  struct edge_info *inf = EDGE_INFO (e);

	  if (!inf->ignore && !inf->on_tree)
	    {
	      /* Abnormal edges cannot hold instrumentation code and
		 must have been placed on the spanning tree.  */
	      gcc_assert (!(e->flags & EDGE_ABNORMAL));
	      if (dump_file)
		fprintf (dump_file, "Edge %d to %d instrumented%s\n",
			 e->src->index, e->dest->index,
			 EDGE_CRITICAL_P (e) ? " (and split)" : "");
	      (profile_hooks->gen_edge_profiler) (num_instr_edges++, e);
	    }
	}
    }

  total_num_blocks_created += num_edges;
  if (dump_file)
    fprintf (dump_file, "%d edges instrumented\n", num_instr_edges);
  return num_instr_edges;
}
168
/* Add code to measure histograms for values in list VALUES.
   Frees VALUES before returning.  */
static void
instrument_values (histogram_values values)
{
  unsigned i, t;

  /* Emit code to generate the histograms before the insns.  */

  for (i = 0; i < VEC_length (histogram_value, values); i++)
    {
      histogram_value hist = VEC_index (histogram_value, values, i);
      /* Map the histogram type to the gcov counter section that will
	 hold its counts.  */
      switch (hist->type)
	{
	case HIST_TYPE_INTERVAL:
	  t = GCOV_COUNTER_V_INTERVAL;
	  break;

	case HIST_TYPE_POW2:
	  t = GCOV_COUNTER_V_POW2;
	  break;

	case HIST_TYPE_SINGLE_VALUE:
	  t = GCOV_COUNTER_V_SINGLE;
	  break;

	case HIST_TYPE_CONST_DELTA:
	  t = GCOV_COUNTER_V_DELTA;
	  break;

	default:
	  gcc_unreachable ();
	}
      /* Skip this value entirely if its counters cannot be
	 allocated.  */
      if (!coverage_counter_alloc (t, hist->n_counters))
	continue;

      /* Emit the profiler code appropriate for this histogram kind.  */
      switch (hist->type)
	{
	case HIST_TYPE_INTERVAL:
	  (profile_hooks->gen_interval_profiler) (hist, t, 0);
	  break;

	case HIST_TYPE_POW2:
	  (profile_hooks->gen_pow2_profiler) (hist, t, 0);
	  break;

	case HIST_TYPE_SINGLE_VALUE:
	  (profile_hooks->gen_one_value_profiler) (hist, t, 0);
	  break;

	case HIST_TYPE_CONST_DELTA:
	  (profile_hooks->gen_const_delta_profiler) (hist, t, 0);
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  VEC_free (histogram_value, heap, values);
}
228 \f
229
/* Computes hybrid profile for all matching entries in da_file.
   Returns the array of arc counters read from the coverage data, or
   NULL if none could be read.  Fills in the global profile_info via
   get_coverage_counts.  */

static gcov_type *
get_exec_counts (void)
{
  unsigned num_edges = 0;
  basic_block bb;
  gcov_type *counts;

  /* Count the edges to be (possibly) instrumented.  */
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
    {
      edge e;
      edge_iterator ei;

      FOR_EACH_EDGE (e, ei, bb->succs)
	if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
	  num_edges++;
    }

  counts = get_coverage_counts (GCOV_COUNTER_ARCS, num_edges, &profile_info);
  if (!counts)
    return NULL;

  if (dump_file && profile_info)
    fprintf(dump_file, "Merged %u profiles with maximal count %u.\n",
	    profile_info->runs, (unsigned) profile_info->sum_max);

  return counts;
}
260 \f
261
262 /* Compute the branch probabilities for the various branches.
263 Annotate them accordingly. */
264
265 static void
266 compute_branch_probabilities (void)
267 {
268 basic_block bb;
269 int i;
270 int num_edges = 0;
271 int changes;
272 int passes;
273 int hist_br_prob[20];
274 int num_never_executed;
275 int num_branches;
276 gcov_type *exec_counts = get_exec_counts ();
277 int exec_counts_pos = 0;
278
279 /* Very simple sanity checks so we catch bugs in our profiling code. */
280 if (profile_info)
281 {
282 if (profile_info->run_max * profile_info->runs < profile_info->sum_max)
283 {
284 error ("corrupted profile info: run_max * runs < sum_max");
285 exec_counts = NULL;
286 }
287
288 if (profile_info->sum_all < profile_info->sum_max)
289 {
290 error ("corrupted profile info: sum_all is smaller than sum_max");
291 exec_counts = NULL;
292 }
293 }
294
295 /* Attach extra info block to each bb. */
296
297 alloc_aux_for_blocks (sizeof (struct bb_info));
298 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
299 {
300 edge e;
301 edge_iterator ei;
302
303 FOR_EACH_EDGE (e, ei, bb->succs)
304 if (!EDGE_INFO (e)->ignore)
305 BB_INFO (bb)->succ_count++;
306 FOR_EACH_EDGE (e, ei, bb->preds)
307 if (!EDGE_INFO (e)->ignore)
308 BB_INFO (bb)->pred_count++;
309 }
310
311 /* Avoid predicting entry on exit nodes. */
312 BB_INFO (EXIT_BLOCK_PTR)->succ_count = 2;
313 BB_INFO (ENTRY_BLOCK_PTR)->pred_count = 2;
314
315 /* For each edge not on the spanning tree, set its execution count from
316 the .da file. */
317
318 /* The first count in the .da file is the number of times that the function
319 was entered. This is the exec_count for block zero. */
320
321 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
322 {
323 edge e;
324 edge_iterator ei;
325
326 FOR_EACH_EDGE (e, ei, bb->succs)
327 if (!EDGE_INFO (e)->ignore && !EDGE_INFO (e)->on_tree)
328 {
329 num_edges++;
330 if (exec_counts)
331 {
332 e->count = exec_counts[exec_counts_pos++];
333 if (e->count > profile_info->sum_max)
334 {
335 error ("corrupted profile info: edge from %i to %i exceeds maximal count",
336 bb->index, e->dest->index);
337 }
338 }
339 else
340 e->count = 0;
341
342 EDGE_INFO (e)->count_valid = 1;
343 BB_INFO (bb)->succ_count--;
344 BB_INFO (e->dest)->pred_count--;
345 if (dump_file)
346 {
347 fprintf (dump_file, "\nRead edge from %i to %i, count:",
348 bb->index, e->dest->index);
349 fprintf (dump_file, HOST_WIDEST_INT_PRINT_DEC,
350 (HOST_WIDEST_INT) e->count);
351 }
352 }
353 }
354
355 if (dump_file)
356 fprintf (dump_file, "\n%d edge counts read\n", num_edges);
357
358 /* For every block in the file,
359 - if every exit/entrance edge has a known count, then set the block count
360 - if the block count is known, and every exit/entrance edge but one has
361 a known execution count, then set the count of the remaining edge
362
363 As edge counts are set, decrement the succ/pred count, but don't delete
364 the edge, that way we can easily tell when all edges are known, or only
365 one edge is unknown. */
366
367 /* The order that the basic blocks are iterated through is important.
368 Since the code that finds spanning trees starts with block 0, low numbered
369 edges are put on the spanning tree in preference to high numbered edges.
370 Hence, most instrumented edges are at the end. Graph solving works much
371 faster if we propagate numbers from the end to the start.
372
373 This takes an average of slightly more than 3 passes. */
374
375 changes = 1;
376 passes = 0;
377 while (changes)
378 {
379 passes++;
380 changes = 0;
381 FOR_BB_BETWEEN (bb, EXIT_BLOCK_PTR, NULL, prev_bb)
382 {
383 struct bb_info *bi = BB_INFO (bb);
384 if (! bi->count_valid)
385 {
386 if (bi->succ_count == 0)
387 {
388 edge e;
389 edge_iterator ei;
390 gcov_type total = 0;
391
392 FOR_EACH_EDGE (e, ei, bb->succs)
393 total += e->count;
394 bb->count = total;
395 bi->count_valid = 1;
396 changes = 1;
397 }
398 else if (bi->pred_count == 0)
399 {
400 edge e;
401 edge_iterator ei;
402 gcov_type total = 0;
403
404 FOR_EACH_EDGE (e, ei, bb->preds)
405 total += e->count;
406 bb->count = total;
407 bi->count_valid = 1;
408 changes = 1;
409 }
410 }
411 if (bi->count_valid)
412 {
413 if (bi->succ_count == 1)
414 {
415 edge e;
416 edge_iterator ei;
417 gcov_type total = 0;
418
419 /* One of the counts will be invalid, but it is zero,
420 so adding it in also doesn't hurt. */
421 FOR_EACH_EDGE (e, ei, bb->succs)
422 total += e->count;
423
424 /* Seedgeh for the invalid edge, and set its count. */
425 FOR_EACH_EDGE (e, ei, bb->succs)
426 if (! EDGE_INFO (e)->count_valid && ! EDGE_INFO (e)->ignore)
427 break;
428
429 /* Calculate count for remaining edge by conservation. */
430 total = bb->count - total;
431
432 gcc_assert (e);
433 EDGE_INFO (e)->count_valid = 1;
434 e->count = total;
435 bi->succ_count--;
436
437 BB_INFO (e->dest)->pred_count--;
438 changes = 1;
439 }
440 if (bi->pred_count == 1)
441 {
442 edge e;
443 edge_iterator ei;
444 gcov_type total = 0;
445
446 /* One of the counts will be invalid, but it is zero,
447 so adding it in also doesn't hurt. */
448 FOR_EACH_EDGE (e, ei, bb->preds)
449 total += e->count;
450
451 /* Search for the invalid edge, and set its count. */
452 FOR_EACH_EDGE (e, ei, bb->preds)
453 if (!EDGE_INFO (e)->count_valid && !EDGE_INFO (e)->ignore)
454 break;
455
456 /* Calculate count for remaining edge by conservation. */
457 total = bb->count - total + e->count;
458
459 gcc_assert (e);
460 EDGE_INFO (e)->count_valid = 1;
461 e->count = total;
462 bi->pred_count--;
463
464 BB_INFO (e->src)->succ_count--;
465 changes = 1;
466 }
467 }
468 }
469 }
470 if (dump_file)
471 dump_flow_info (dump_file);
472
473 total_num_passes += passes;
474 if (dump_file)
475 fprintf (dump_file, "Graph solving took %d passes.\n\n", passes);
476
477 /* If the graph has been correctly solved, every block will have a
478 succ and pred count of zero. */
479 FOR_EACH_BB (bb)
480 {
481 gcc_assert (!BB_INFO (bb)->succ_count && !BB_INFO (bb)->pred_count);
482 }
483
484 /* For every edge, calculate its branch probability and add a reg_note
485 to the branch insn to indicate this. */
486
487 for (i = 0; i < 20; i++)
488 hist_br_prob[i] = 0;
489 num_never_executed = 0;
490 num_branches = 0;
491
492 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
493 {
494 edge e;
495 edge_iterator ei;
496 rtx note;
497
498 if (bb->count < 0)
499 {
500 error ("corrupted profile info: number of iterations for basic block %d thought to be %i",
501 bb->index, (int)bb->count);
502 bb->count = 0;
503 }
504 FOR_EACH_EDGE (e, ei, bb->succs)
505 {
506 /* Function may return twice in the cased the called function is
507 setjmp or calls fork, but we can't represent this by extra
508 edge from the entry, since extra edge from the exit is
509 already present. We get negative frequency from the entry
510 point. */
511 if ((e->count < 0
512 && e->dest == EXIT_BLOCK_PTR)
513 || (e->count > bb->count
514 && e->dest != EXIT_BLOCK_PTR))
515 {
516 if (block_ends_with_call_p (bb))
517 e->count = e->count < 0 ? 0 : bb->count;
518 }
519 if (e->count < 0 || e->count > bb->count)
520 {
521 error ("corrupted profile info: number of executions for edge %d-%d thought to be %i",
522 e->src->index, e->dest->index,
523 (int)e->count);
524 e->count = bb->count / 2;
525 }
526 }
527 if (bb->count)
528 {
529 FOR_EACH_EDGE (e, ei, bb->succs)
530 e->probability = (e->count * REG_BR_PROB_BASE + bb->count / 2) / bb->count;
531 if (bb->index >= 0
532 && block_ends_with_condjump_p (bb)
533 && EDGE_COUNT (bb->succs) >= 2)
534 {
535 int prob;
536 edge e;
537 int index;
538
539 /* Find the branch edge. It is possible that we do have fake
540 edges here. */
541 FOR_EACH_EDGE (e, ei, bb->succs)
542 if (!(e->flags & (EDGE_FAKE | EDGE_FALLTHRU)))
543 break;
544
545 prob = e->probability;
546 index = prob * 20 / REG_BR_PROB_BASE;
547
548 if (index == 20)
549 index = 19;
550 hist_br_prob[index]++;
551
552 /* Do this for RTL only. */
553 if (!ir_type ())
554 {
555 note = find_reg_note (BB_END (bb), REG_BR_PROB, 0);
556 /* There may be already note put by some other pass, such
557 as builtin_expect expander. */
558 if (note)
559 XEXP (note, 0) = GEN_INT (prob);
560 else
561 REG_NOTES (BB_END (bb))
562 = gen_rtx_EXPR_LIST (REG_BR_PROB, GEN_INT (prob),
563 REG_NOTES (BB_END (bb)));
564 }
565 num_branches++;
566 }
567 }
568 /* Otherwise try to preserve the existing REG_BR_PROB probabilities
569 tree based profile guessing put into code. BB can be the
570 ENTRY_BLOCK, and it can have multiple (fake) successors in
571 EH cases, but it still has no code; don't crash in this case. */
572 else if (profile_status == PROFILE_ABSENT
573 && !ir_type ()
574 && EDGE_COUNT (bb->succs) > 1
575 && BB_END (bb)
576 && (note = find_reg_note (BB_END (bb), REG_BR_PROB, 0)))
577 {
578 int prob = INTVAL (XEXP (note, 0));
579
580 BRANCH_EDGE (bb)->probability = prob;
581 FALLTHRU_EDGE (bb)->probability = REG_BR_PROB_BASE - prob;
582 }
583 /* As a last resort, distribute the probabilities evenly.
584 Use simple heuristics that if there are normal edges,
585 give all abnormals frequency of 0, otherwise distribute the
586 frequency over abnormals (this is the case of noreturn
587 calls). */
588 else if (profile_status == PROFILE_ABSENT)
589 {
590 int total = 0;
591
592 FOR_EACH_EDGE (e, ei, bb->succs)
593 if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
594 total ++;
595 if (total)
596 {
597 FOR_EACH_EDGE (e, ei, bb->succs)
598 if (!(e->flags & (EDGE_COMPLEX | EDGE_FAKE)))
599 e->probability = REG_BR_PROB_BASE / total;
600 else
601 e->probability = 0;
602 }
603 else
604 {
605 total += EDGE_COUNT (bb->succs);
606 FOR_EACH_EDGE (e, ei, bb->succs)
607 e->probability = REG_BR_PROB_BASE / total;
608 }
609 if (bb->index >= 0
610 && block_ends_with_condjump_p (bb)
611 && EDGE_COUNT (bb->succs) >= 2)
612 num_branches++, num_never_executed;
613 }
614 }
615 counts_to_freqs ();
616
617 if (dump_file)
618 {
619 fprintf (dump_file, "%d branches\n", num_branches);
620 fprintf (dump_file, "%d branches never executed\n",
621 num_never_executed);
622 if (num_branches)
623 for (i = 0; i < 10; i++)
624 fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
625 (hist_br_prob[i] + hist_br_prob[19-i]) * 100 / num_branches,
626 5 * i, 5 * i + 5);
627
628 total_num_branches += num_branches;
629 total_num_never_executed += num_never_executed;
630 for (i = 0; i < 20; i++)
631 total_hist_br_prob[i] += hist_br_prob[i];
632
633 fputc ('\n', dump_file);
634 fputc ('\n', dump_file);
635 }
636
637 free_aux_for_blocks ();
638 }
639
/* Load value histograms values whose description is stored in VALUES array
   from .gcda file.  Attaches the counters to the corresponding insns
   (RTL) or statements (trees).  */

static void
compute_value_histograms (histogram_values values)
{
  unsigned i, j, t, any;
  unsigned n_histogram_counters[GCOV_N_VALUE_COUNTERS];
  gcov_type *histogram_counts[GCOV_N_VALUE_COUNTERS];
  gcov_type *act_count[GCOV_N_VALUE_COUNTERS];
  gcov_type *aact_count;
  histogram_value hist;

  /* Total how many counters of each kind the VALUES list needs.  */
  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    n_histogram_counters[t] = 0;

  for (i = 0; i < VEC_length (histogram_value, values); i++)
    {
      hist = VEC_index (histogram_value, values, i);
      n_histogram_counters[(int) hist->type] += hist->n_counters;
    }

  /* Read each kind's counter section from the coverage data.  */
  any = 0;
  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    {
      if (!n_histogram_counters[t])
	{
	  histogram_counts[t] = NULL;
	  continue;
	}

      histogram_counts[t] =
	get_coverage_counts (COUNTER_FOR_HIST_TYPE (t),
			     n_histogram_counters[t], NULL);
      if (histogram_counts[t])
	any = 1;
      act_count[t] = histogram_counts[t];
    }
  /* Nothing to attach if no counter section could be read.  */
  if (!any)
    return;

  for (i = 0; i < VEC_length (histogram_value, values); i++)
    {
      rtx hist_list = NULL_RTX;

      hist = VEC_index (histogram_value, values, i);
      t = (int) hist->type;

      /* AACT_COUNT is this value's slice of the counters; advance the
	 per-type cursor past it.  */
      aact_count = act_count[t];
      act_count[t] += hist->n_counters;

      if (!ir_type ())
	{
	  /* RTL IL: attach the counters as a REG_VALUE_PROFILE note
	     on the profiled insn: (type value count0 count1 ...).  */
	  for (j = hist->n_counters; j > 0; j--)
	    hist_list = alloc_EXPR_LIST (0, GEN_INT (aact_count[j - 1]),
					 hist_list);
	  hist_list = alloc_EXPR_LIST (0,
			copy_rtx (hist->hvalue.rtl.value), hist_list);
	  hist_list = alloc_EXPR_LIST (0, GEN_INT (hist->type), hist_list);
	  REG_NOTES (hist->hvalue.rtl.insn) =
	    alloc_EXPR_LIST (REG_VALUE_PROFILE, hist_list,
			     REG_NOTES (hist->hvalue.rtl.insn));
	}
      else
	{
	  /* Tree IL: chain the histogram onto the statement annotation
	     and copy its counters into freshly allocated storage.  */
	  tree stmt = hist->hvalue.tree.stmt;
	  stmt_ann_t ann = get_stmt_ann (stmt);
	  hist->hvalue.tree.next = ann->histograms;
	  ann->histograms = hist;
	  hist->hvalue.tree.counters =
	    xmalloc (sizeof (gcov_type) * hist->n_counters);
	  for (j = 0; j < hist->n_counters; j++)
	    hist->hvalue.tree.counters[j] = aact_count[j];
	}
    }

  for (t = 0; t < GCOV_N_VALUE_COUNTERS; t++)
    if (histogram_counts[t])
      free (histogram_counts[t]);
}
720
721 #define BB_TO_GCOV_INDEX(bb) ((bb)->index + 1)
722 /* When passed NULL as file_name, initialize.
723 When passed something else, output the necessary commands to change
724 line to LINE and offset to FILE_NAME. */
725 static void
726 output_location (char const *file_name, int line,
727 gcov_position_t *offset, basic_block bb)
728 {
729 static char const *prev_file_name;
730 static int prev_line;
731 bool name_differs, line_differs;
732
733 if (!file_name)
734 {
735 prev_file_name = NULL;
736 prev_line = -1;
737 return;
738 }
739
740 name_differs = !prev_file_name || strcmp (file_name, prev_file_name);
741 line_differs = prev_line != line;
742
743 if (name_differs || line_differs)
744 {
745 if (!*offset)
746 {
747 *offset = gcov_write_tag (GCOV_TAG_LINES);
748 gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));
749 name_differs = line_differs=true;
750 }
751
752 /* If this is a new source file, then output the
753 file's name to the .bb file. */
754 if (name_differs)
755 {
756 prev_file_name = file_name;
757 gcov_write_unsigned (0);
758 gcov_write_string (prev_file_name);
759 }
760 if (line_differs)
761 {
762 gcov_write_unsigned (line);
763 prev_line = line;
764 }
765 }
766 }
767
/* Instrument and/or analyze program behavior based on program flow graph.
   In either case, this function builds a flow graph for the function being
   compiled.  The flow graph is stored in BB_GRAPH.

   When FLAG_PROFILE_ARCS is nonzero, this function instruments the edges in
   the flow graph that are needed to reconstruct the dynamic behavior of the
   flow graph.

   When FLAG_BRANCH_PROBABILITIES is nonzero, this function reads auxiliary
   information from a data file containing edge count information from previous
   executions of the function being compiled.  In this case, the flow graph is
   annotated with actual execution counts, which are later propagated into the
   rtl for optimization purposes.

   Main entry point of this file.  */

void
branch_prob (void)
{
  basic_block bb;
  unsigned i;
  unsigned num_edges, ignored_edges;
  unsigned num_instrumented;
  struct edge_list *el;
  histogram_values values = NULL;

  total_num_times_called++;

  flow_call_edges_add (NULL);
  add_noreturn_fake_exit_edges ();

  /* We can't handle cyclic regions constructed using abnormal edges.
     To avoid these we replace every source of abnormal edge by a fake
     edge from entry node and every destination by fake edge to exit.
     This keeps graph acyclic and our calculation exact for all normal
     edges except for exit and entrance ones.

     We also add fake exit edges for each call and asm statement in the
     basic, since it may not return.  */

  FOR_EACH_BB (bb)
    {
      int need_exit_edge = 0, need_entry_edge = 0;
      int have_exit_edge = 0, have_entry_edge = 0;
      edge e;
      edge_iterator ei;

      /* Functions returning multiple times are not handled by extra edges.
	 Instead we simply allow negative counts on edges from exit to the
	 block past call and corresponding probabilities.  We can't go
	 with the extra edges because that would result in flowgraph that
	 needs to have fake edges outside the spanning tree.  */

      /* Decide whether this block needs a fake edge to exit and/or a
	 fake edge from entry to compensate for abnormal flow.  */
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
	      && e->dest != EXIT_BLOCK_PTR)
	    need_exit_edge = 1;
	  if (e->dest == EXIT_BLOCK_PTR)
	    have_exit_edge = 1;
	}
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
	      && e->src != ENTRY_BLOCK_PTR)
	    need_entry_edge = 1;
	  if (e->src == ENTRY_BLOCK_PTR)
	    have_entry_edge = 1;
	}

      if (need_exit_edge && !have_exit_edge)
	{
	  if (dump_file)
	    fprintf (dump_file, "Adding fake exit edge to bb %i\n",
		     bb->index);
	  make_edge (bb, EXIT_BLOCK_PTR, EDGE_FAKE);
	}
      if (need_entry_edge && !have_entry_edge)
	{
	  if (dump_file)
	    fprintf (dump_file, "Adding fake entry edge to bb %i\n",
		     bb->index);
	  make_edge (ENTRY_BLOCK_PTR, bb, EDGE_FAKE);
	}
    }

  el = create_edge_list ();
  num_edges = NUM_EDGES (el);
  alloc_aux_for_edges (sizeof (struct edge_info));

  /* The basic blocks are expected to be numbered sequentially.  */
  compact_blocks ();

  ignored_edges = 0;
  for (i = 0 ; i < num_edges ; i++)
    {
      edge e = INDEX_EDGE (el, i);
      e->count = 0;

      /* Mark edges we've replaced by fake edges above as ignored.  */
      if ((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL))
	  && e->src != ENTRY_BLOCK_PTR && e->dest != EXIT_BLOCK_PTR)
	{
	  EDGE_INFO (e)->ignore = 1;
	  ignored_edges++;
	}
    }

  /* Create spanning tree from basic block graph, mark each edge that is
     on the spanning tree.  We insert as many abnormal and critical edges
     as possible to minimize number of edge splits necessary.  */

  find_spanning_tree (el);

  /* Fake edges that are not on the tree will not be instrumented, so
     mark them ignored.  */
  for (num_instrumented = i = 0; i < num_edges; i++)
    {
      edge e = INDEX_EDGE (el, i);
      struct edge_info *inf = EDGE_INFO (e);

      if (inf->ignore || inf->on_tree)
	/*NOP*/;
      else if (e->flags & EDGE_FAKE)
	{
	  inf->ignore = 1;
	  ignored_edges++;
	}
      else
	num_instrumented++;
    }

  /* Update the file-wide statistics; +2 accounts for the entry and
     exit blocks.  */
  total_num_blocks += n_basic_blocks + 2;
  if (dump_file)
    fprintf (dump_file, "%d basic blocks\n", n_basic_blocks);

  total_num_edges += num_edges;
  if (dump_file)
    fprintf (dump_file, "%d edges\n", num_edges);

  total_num_edges_ignored += ignored_edges;
  if (dump_file)
    fprintf (dump_file, "%d ignored edges\n", ignored_edges);

  /* Write the data from which gcov can reconstruct the basic block
     graph.  */

  /* Basic block flags */
  if (coverage_begin_output ())
    {
      gcov_position_t offset;

      offset = gcov_write_tag (GCOV_TAG_BLOCKS);
      for (i = 0; i != (unsigned) (n_basic_blocks + 2); i++)
	gcov_write_unsigned (0);
      gcov_write_length (offset);
    }

  /* Keep all basic block indexes nonnegative in the gcov output.
     Index 0 is used for entry block, last index is for exit block.
     (Restored to ENTRY_BLOCK/EXIT_BLOCK after the gcov output below.)
     */
  ENTRY_BLOCK_PTR->index = -1;
  EXIT_BLOCK_PTR->index = last_basic_block;

  /* Arcs */
  if (coverage_begin_output ())
    {
      gcov_position_t offset;

      FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
	{
	  edge e;
	  edge_iterator ei;

	  offset = gcov_write_tag (GCOV_TAG_ARCS);
	  gcov_write_unsigned (BB_TO_GCOV_INDEX (bb));

	  FOR_EACH_EDGE (e, ei, bb->succs)
	    {
	      struct edge_info *i = EDGE_INFO (e);
	      if (!i->ignore)
		{
		  unsigned flag_bits = 0;

		  if (i->on_tree)
		    flag_bits |= GCOV_ARC_ON_TREE;
		  if (e->flags & EDGE_FAKE)
		    flag_bits |= GCOV_ARC_FAKE;
		  if (e->flags & EDGE_FALLTHRU)
		    flag_bits |= GCOV_ARC_FALLTHROUGH;
		  /* On trees we don't have fallthru flags, but we can
		     recompute them from CFG shape.  */
		  if (ir_type ()
		      && e->flags & (EDGE_TRUE_VALUE | EDGE_FALSE_VALUE)
		      && e->src->next_bb == e->dest)
		    flag_bits |= GCOV_ARC_FALLTHROUGH;

		  gcov_write_unsigned (BB_TO_GCOV_INDEX (e->dest));
		  gcov_write_unsigned (flag_bits);
	        }
	    }

	  gcov_write_length (offset);
	}
    }

  /* Line numbers.  */
  if (coverage_begin_output ())
    {
      /* Initialize the output.  */
      output_location (NULL, 0, NULL, NULL);

      if (!ir_type ())
	{
	  /* RTL IL: harvest locations from line number notes.  */
	  gcov_position_t offset;

	  FOR_EACH_BB (bb)
	    {
	      rtx insn = BB_HEAD (bb);
	      int ignore_next_note = 0;

	      offset = 0;

	      /* We are looking for line number notes.  Search backward
		 before basic block to find correct ones.  */
	      insn = prev_nonnote_insn (insn);
	      if (!insn)
		insn = get_insns ();
	      else
		insn = NEXT_INSN (insn);

	      while (insn != BB_END (bb))
		{
		  if (NOTE_P (insn))
		    {
		      /* Must ignore the line number notes that
			 immediately follow the end of an inline function
			 to avoid counting it twice.  There is a note
			 before the call, and one after the call.  */
		      if (NOTE_LINE_NUMBER (insn)
			  == NOTE_INSN_REPEATED_LINE_NUMBER)
			ignore_next_note = 1;
		      else if (NOTE_LINE_NUMBER (insn) <= 0)
			/*NOP*/;
		      else if (ignore_next_note)
			ignore_next_note = 0;
		      else
			{
			  expanded_location s;
			  NOTE_EXPANDED_LOCATION (s, insn);
			  output_location (s.file, s.line, &offset, bb);
			}
		    }
		  insn = NEXT_INSN (insn);
		}

	      if (offset)
		{
		  /* A file of NULL indicates the end of run.  */
		  gcov_write_unsigned (0);
		  gcov_write_string (NULL);
		  gcov_write_length (offset);
		}
	    }
	}
      else
	{
	  /* Tree IL: harvest locations from statement locations.  */
	  gcov_position_t offset;

	  FOR_EACH_BB (bb)
	    {
	      block_stmt_iterator bsi;

	      offset = 0;

	      /* The first real block gets the function declaration's
		 location as its starting point.  */
	      if (bb == ENTRY_BLOCK_PTR->next_bb)
		{
		  expanded_location curr_location =
		    expand_location (DECL_SOURCE_LOCATION
				     (current_function_decl));
		  output_location (curr_location.file, curr_location.line,
				   &offset, bb);
		}

	      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
		{
		  tree stmt = bsi_stmt (bsi);
		  if (EXPR_HAS_LOCATION (stmt))
		    output_location (EXPR_FILENAME (stmt),
				     EXPR_LINENO (stmt),
				     &offset, bb);
		}

	      /* Notice GOTO expressions we eliminated while constructing the
		 CFG.  */
	      if (single_succ_p (bb) && single_succ_edge (bb)->goto_locus)
		{
		  /* ??? source_locus type is marked deprecated in input.h.  */
		  source_locus curr_location = single_succ_edge (bb)->goto_locus;
		  /* ??? The FILE/LINE API is inconsistent for these cases.  */
#ifdef USE_MAPPED_LOCATION
		  output_location (LOCATION_FILE (curr_location),
				   LOCATION_LINE (curr_location),
				   &offset, bb);
#else
		  output_location (curr_location->file, curr_location->line,
				   &offset, bb);
#endif
		}

	      if (offset)
		{
		  /* A file of NULL indicates the end of run.  */
		  gcov_write_unsigned (0);
		  gcov_write_string (NULL);
		  gcov_write_length (offset);
		}
	    }
	}
    }

  /* Restore the real entry/exit block indexes changed above.  */
  ENTRY_BLOCK_PTR->index = ENTRY_BLOCK;
  EXIT_BLOCK_PTR->index = EXIT_BLOCK;
#undef BB_TO_GCOV_INDEX

  if (flag_profile_values)
    find_values_to_profile (&values);

  if (flag_branch_probabilities)
    {
      compute_branch_probabilities ();
      if (flag_profile_values)
	compute_value_histograms (values);
    }

  remove_fake_edges ();

  /* For each edge not on the spanning tree, add counting code.  */
  if (profile_arc_flag
      && coverage_counter_alloc (GCOV_COUNTER_ARCS, num_instrumented))
    {
      unsigned n_instrumented;

      profile_hooks->init_edge_profiler ();

      n_instrumented = instrument_edges (el);

      gcc_assert (n_instrumented == num_instrumented);

      if (flag_profile_values)
	instrument_values (values);

      /* Commit changes done by instrumentation.  */
      if (ir_type ())
	bsi_commit_edge_inserts ();
      else
	{
	  commit_edge_insertions_watch_calls ();
	  allocate_reg_info (max_reg_num (), FALSE, FALSE);
	}
    }

  free_aux_for_edges ();

  if (!ir_type ())
    {
      /* Re-merge split basic blocks and the mess introduced by
	 insert_insn_on_edge.  */
      cleanup_cfg (profile_arc_flag ? CLEANUP_EXPENSIVE : 0);
      if (profile_dump_file())
	dump_flow_info (profile_dump_file());
    }

  free_edge_list (el);
  if (flag_branch_probabilities)
    profile_status = PROFILE_READ;
}
1145 \f
1146 /* Union find algorithm implementation for the basic blocks using
1147 aux fields. */
1148
1149 static basic_block
1150 find_group (basic_block bb)
1151 {
1152 basic_block group = bb, bb1;
1153
1154 while ((basic_block) group->aux != group)
1155 group = (basic_block) group->aux;
1156
1157 /* Compress path. */
1158 while ((basic_block) bb->aux != group)
1159 {
1160 bb1 = (basic_block) bb->aux;
1161 bb->aux = (void *) group;
1162 bb = bb1;
1163 }
1164 return group;
1165 }
1166
1167 static void
1168 union_groups (basic_block bb1, basic_block bb2)
1169 {
1170 basic_block bb1g = find_group (bb1);
1171 basic_block bb2g = find_group (bb2);
1172
1173 /* ??? I don't have a place for the rank field. OK. Lets go w/o it,
1174 this code is unlikely going to be performance problem anyway. */
1175 gcc_assert (bb1g != bb2g);
1176
1177 bb1g->aux = bb2g;
1178 }
1179 \f
1180 /* This function searches all of the edges in the program flow graph, and puts
1181 as many bad edges as possible onto the spanning tree. Bad edges include
1182 abnormal edges, which can't be instrumented at the moment. Since it is
1183 possible for fake edges to form a cycle, we will have to develop some
1184 better way in the future. Also put critical edges to the tree, since they
1185 are more expensive to instrument. */
1186
1187 static void
1188 find_spanning_tree (struct edge_list *el)
1189 {
1190 int i;
1191 int num_edges = NUM_EDGES (el);
1192 basic_block bb;
1193
1194 /* We use aux field for standard union-find algorithm. */
1195 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1196 bb->aux = bb;
1197
1198 /* Add fake edge exit to entry we can't instrument. */
1199 union_groups (EXIT_BLOCK_PTR, ENTRY_BLOCK_PTR);
1200
1201 /* First add all abnormal edges to the tree unless they form a cycle. Also
1202 add all edges to EXIT_BLOCK_PTR to avoid inserting profiling code behind
1203 setting return value from function. */
1204 for (i = 0; i < num_edges; i++)
1205 {
1206 edge e = INDEX_EDGE (el, i);
1207 if (((e->flags & (EDGE_ABNORMAL | EDGE_ABNORMAL_CALL | EDGE_FAKE))
1208 || e->dest == EXIT_BLOCK_PTR)
1209 && !EDGE_INFO (e)->ignore
1210 && (find_group (e->src) != find_group (e->dest)))
1211 {
1212 if (dump_file)
1213 fprintf (dump_file, "Abnormal edge %d to %d put to tree\n",
1214 e->src->index, e->dest->index);
1215 EDGE_INFO (e)->on_tree = 1;
1216 union_groups (e->src, e->dest);
1217 }
1218 }
1219
1220 /* Now insert all critical edges to the tree unless they form a cycle. */
1221 for (i = 0; i < num_edges; i++)
1222 {
1223 edge e = INDEX_EDGE (el, i);
1224 if (EDGE_CRITICAL_P (e) && !EDGE_INFO (e)->ignore
1225 && find_group (e->src) != find_group (e->dest))
1226 {
1227 if (dump_file)
1228 fprintf (dump_file, "Critical edge %d to %d put to tree\n",
1229 e->src->index, e->dest->index);
1230 EDGE_INFO (e)->on_tree = 1;
1231 union_groups (e->src, e->dest);
1232 }
1233 }
1234
1235 /* And now the rest. */
1236 for (i = 0; i < num_edges; i++)
1237 {
1238 edge e = INDEX_EDGE (el, i);
1239 if (!EDGE_INFO (e)->ignore
1240 && find_group (e->src) != find_group (e->dest))
1241 {
1242 if (dump_file)
1243 fprintf (dump_file, "Normal edge %d to %d put to tree\n",
1244 e->src->index, e->dest->index);
1245 EDGE_INFO (e)->on_tree = 1;
1246 union_groups (e->src, e->dest);
1247 }
1248 }
1249
1250 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, NULL, next_bb)
1251 bb->aux = NULL;
1252 }
1253 \f
1254 /* Perform file-level initialization for branch-prob processing. */
1255
1256 void
1257 init_branch_prob (void)
1258 {
1259 int i;
1260
1261 total_num_blocks = 0;
1262 total_num_edges = 0;
1263 total_num_edges_ignored = 0;
1264 total_num_edges_instrumented = 0;
1265 total_num_blocks_created = 0;
1266 total_num_passes = 0;
1267 total_num_times_called = 0;
1268 total_num_branches = 0;
1269 total_num_never_executed = 0;
1270 for (i = 0; i < 20; i++)
1271 total_hist_br_prob[i] = 0;
1272 }
1273
1274 /* Performs file-level cleanup after branch-prob processing
1275 is completed. */
1276
1277 void
1278 end_branch_prob (void)
1279 {
1280 if (dump_file)
1281 {
1282 fprintf (dump_file, "\n");
1283 fprintf (dump_file, "Total number of blocks: %d\n",
1284 total_num_blocks);
1285 fprintf (dump_file, "Total number of edges: %d\n", total_num_edges);
1286 fprintf (dump_file, "Total number of ignored edges: %d\n",
1287 total_num_edges_ignored);
1288 fprintf (dump_file, "Total number of instrumented edges: %d\n",
1289 total_num_edges_instrumented);
1290 fprintf (dump_file, "Total number of blocks created: %d\n",
1291 total_num_blocks_created);
1292 fprintf (dump_file, "Total number of graph solution passes: %d\n",
1293 total_num_passes);
1294 if (total_num_times_called != 0)
1295 fprintf (dump_file, "Average number of graph solution passes: %d\n",
1296 (total_num_passes + (total_num_times_called >> 1))
1297 / total_num_times_called);
1298 fprintf (dump_file, "Total number of branches: %d\n",
1299 total_num_branches);
1300 fprintf (dump_file, "Total number of branches never executed: %d\n",
1301 total_num_never_executed);
1302 if (total_num_branches)
1303 {
1304 int i;
1305
1306 for (i = 0; i < 10; i++)
1307 fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
1308 (total_hist_br_prob[i] + total_hist_br_prob[19-i]) * 100
1309 / total_num_branches, 5*i, 5*i+5);
1310 }
1311 }
1312 }
1313
/* Set up hooks to enable tree-based profiling.  Valid only while the
   IR is in tree form (asserted via ir_type).  */

void
tree_register_profile_hooks (void)
{
  gcc_assert (ir_type ());
  profile_hooks = &tree_profile_hooks;
}
1322
/* Set up hooks to enable RTL-based profiling.  Valid only while the
   IR is in RTL form (asserted via !ir_type).  */

void
rtl_register_profile_hooks (void)
{
  gcc_assert (!ir_type ());
  profile_hooks = &rtl_profile_hooks;
}