1/* Calculate branch probabilities, and basic block execution counts.
 2   Copyright (C) 1990, 91-94, 96, 97, 1998 Free Software Foundation, Inc.
3 Contributed by James E. Wilson, UC Berkeley/Cygnus Support;
4 based on some ideas from Dain Samples of UC Berkeley.
5 Further mangling by Bob Manson, Cygnus Support.
6
7This file is part of GNU CC.
8
9GNU CC is free software; you can redistribute it and/or modify
10it under the terms of the GNU General Public License as published by
11the Free Software Foundation; either version 2, or (at your option)
12any later version.
13
14GNU CC is distributed in the hope that it will be useful,
15but WITHOUT ANY WARRANTY; without even the implied warranty of
16MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17GNU General Public License for more details.
18
19You should have received a copy of the GNU General Public License
20along with GNU CC; see the file COPYING. If not, write to
21the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
22
23/* ??? Really should not put insns inside of LIBCALL sequences; when putting
24   insns after a call, we should look for the insn setting the retval, and
25   insert the new insns after that one. */
26
27/* ??? Register allocation should use basic block execution counts to
28 give preference to the most commonly executed blocks. */
29
30/* ??? The .da files are not safe.  Changing the program after creating .da
31   files, or using different options when compiling with -fbranch-probabilities,
32   can result in the arc data not matching the program.  Maybe add the instrumented
33   arc count to the .bbg file?  Maybe check whether the PFG matches the .bbg file? */
34
35/* ??? Should calculate branch probabilities before instrumenting code, since
36 then we can use arc counts to help decide which arcs to instrument. */
37
38/* ??? Rearrange code so that the most frequently executed arcs go from
39   one block to the next block (i.e. become fall throughs), and move seldom
40   executed code outside of loops even at the expense of adding a few
41   branches to achieve this; see Dain Samples' UC Berkeley thesis. */
42
43#include "config.h"
44#include "system.h"
45#include "rtl.h"
46#include "flags.h"
47#include "insn-flags.h"
48#include "insn-config.h"
49#include "output.h"
50#include "regs.h"
51#include "tree.h"
52#include "output.h"
53#include "gcov-io.h"
54#include "toplev.h"
55
56extern char * xmalloc ();
57
58/* One of these is dynamically created whenever we identify an arc in the
59 function. */
60
61struct adj_list
62{
63 int source;
64 int target;
65 int arc_count;
66 unsigned int count_valid : 1;
67 unsigned int on_tree : 1;
68 unsigned int fake : 1;
69 unsigned int fall_through : 1;
70 rtx branch_insn;
71 struct adj_list *pred_next;
72 struct adj_list *succ_next;
73};
74
75#define ARC_TARGET(ARCPTR) (ARCPTR->target)
76#define ARC_SOURCE(ARCPTR) (ARCPTR->source)
77#define ARC_COUNT(ARCPTR) (ARCPTR->arc_count)
78
79/* Count the number of basic blocks, and create an array of these structures,
80 one for each bb in the function. */
81
82struct bb_info
83{
84 struct adj_list *succ;
85 struct adj_list *pred;
86 int succ_count;
87 int pred_count;
88 int exec_count;
89 unsigned int count_valid : 1;
90 unsigned int on_tree : 1;
91 rtx first_insn;
92};
93
94/* Indexed by label number, gives the basic block number containing that
95 label. */
96
97static int *label_to_bb;
98
99/* Number of valid entries in the label_to_bb array. */
100
101static int label_to_bb_size;
102
103/* Indexed by block index, holds the basic block graph. */
104
105static struct bb_info *bb_graph;
106
107/* Name and file pointer of the output file for the basic block graph. */
108
109static char *bbg_file_name;
110static FILE *bbg_file;
111
112/* Name and file pointer of the input file for the arc count data. */
113
114static char *da_file_name;
115static FILE *da_file;
116
117/* Pointer of the output file for the basic block/line number map. */
118static FILE *bb_file;
119
120/* Last source file name written to bb_file. */
121
122static char *last_bb_file_name;
123
124/* Indicates whether the next line number note should be output to
125 bb_file or not. Used to eliminate a redundant note after an
126 expanded inline function call. */
127
128static int ignore_next_note;
129
130/* Used by final, for allocating the proper amount of storage for the
131 instrumented arc execution counts. */
132
133int count_instrumented_arcs;
134
135/* Number of executions for the return label. */
136
137int return_label_execution_count;
138
139/* Collect statistics on the performance of this pass for the entire source
140 file. */
141
142static int total_num_blocks;
143static int total_num_arcs;
144static int total_num_arcs_instrumented;
145static int total_num_blocks_created;
146static int total_num_passes;
147static int total_num_times_called;
148static int total_hist_br_prob[20];
149static int total_num_never_executed;
150static int total_num_branches;
151
152/* Forward declarations. */
153static void init_arc PROTO((struct adj_list *, int, int, rtx));
154static void find_spanning_tree PROTO((int));
155static void expand_spanning_tree PROTO((int));
156static void fill_spanning_tree PROTO((int));
157static void init_arc_profiler PROTO((void));
158static void output_arc_profiler PROTO((int, rtx));
159
160#ifndef LONG_TYPE_SIZE
161#define LONG_TYPE_SIZE BITS_PER_WORD
162#endif
163
164/* If non-zero, we need to output a constructor to set up the
165 per-object-file data. */
166static int need_func_profiler = 0;
167
168\f
169/* Add arc instrumentation code to the entire insn chain.
170
171 F is the first insn of the chain.
172 NUM_BLOCKS is the number of basic blocks found in F.
173 DUMP_FILE, if nonzero, is an rtl dump file we can write to. */
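
/* Placement of the counter code for an instrumented arc: if the arc is the
   only exit from its source block, the increment is emitted at the start of
   the source block; if it is the only entrance to its target block, at the
   start of the target block; a fall-through arc is instrumented just after
   the branch that ends its source block, so it runs only on the fall-through
   path; otherwise a new basic block is created on the arc to hold the
   counting code.  */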
174
175static void
176instrument_arcs (f, num_blocks, dump_file)
177 rtx f;
178 int num_blocks;
179 FILE *dump_file;
180{
181 register int i;
182 register struct adj_list *arcptr, *backptr;
183 int num_arcs = 0;
184 int num_instr_arcs = 0;
185 rtx insn;
186
187 /* Instrument the program start. */
188 /* Handle block 0 specially, since it will always be instrumented,
189 but it doesn't have a valid first_insn or branch_insn. We must
190 put the instructions before the NOTE_INSN_FUNCTION_BEG note, so
191 that they don't clobber any of the parameters of the current
192 function. */
193 for (insn = f; insn; insn = NEXT_INSN (insn))
194 if (GET_CODE (insn) == NOTE
195 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
196 break;
197 insn = PREV_INSN (insn);
198 need_func_profiler = 1;
199 output_arc_profiler (total_num_arcs_instrumented + num_instr_arcs++, insn);
200
201 for (i = 1; i < num_blocks; i++)
202 for (arcptr = bb_graph[i].succ; arcptr; arcptr = arcptr->succ_next)
203 if (! arcptr->on_tree)
204 {
205 if (dump_file)
206 fprintf (dump_file, "Arc %d to %d instrumented\n", i,
207 ARC_TARGET (arcptr));
208
209 /* Check to see if this arc is the only exit from its source block,
210 or the only entrance to its target block. In either case,
211 we don't need to create a new block to instrument the arc. */
212
213 if (bb_graph[i].succ == arcptr && arcptr->succ_next == 0)
214 {
215 /* Instrument the source block. */
216 output_arc_profiler (total_num_arcs_instrumented
217 + num_instr_arcs++,
218 PREV_INSN (bb_graph[i].first_insn));
219 }
220 else if (arcptr == bb_graph[ARC_TARGET (arcptr)].pred
221 && arcptr->pred_next == 0)
222 {
223 /* Instrument the target block. */
224 output_arc_profiler (total_num_arcs_instrumented
225 + num_instr_arcs++,
226 PREV_INSN (bb_graph[ARC_TARGET (arcptr)].first_insn));
227 }
228 else if (arcptr->fall_through)
229 {
230 /* This is a fall-through; put the instrumentation code after
231 the branch that ends this block. */
232
233 for (backptr = bb_graph[i].succ; backptr;
234 backptr = backptr->succ_next)
235 if (backptr != arcptr)
236 break;
237
238 output_arc_profiler (total_num_arcs_instrumented
239 + num_instr_arcs++,
240 backptr->branch_insn);
241 }
242 else
243 {
244 /* Must emit a new basic block to hold the arc counting code. */
245 enum rtx_code code = GET_CODE (PATTERN (arcptr->branch_insn));
246
247 if (code == SET)
248 {
249 /* Create the new basic block right after the branch.
250 Invert the branch so that it jumps past the end of the new
251 block. The new block will consist of the instrumentation
252 code, and a jump to the target of this arc. */
253 int this_is_simplejump = simplejump_p (arcptr->branch_insn);
254 rtx new_label = gen_label_rtx ();
255 rtx old_label, set_src;
256 rtx after = arcptr->branch_insn;
257
258 /* Simplejumps can't reach here. */
259 if (this_is_simplejump)
260 abort ();
261
262 /* We can't use JUMP_LABEL, because it won't be set if we
263 are compiling without optimization. */
264
265 set_src = SET_SRC (single_set (arcptr->branch_insn));
266 if (GET_CODE (set_src) == LABEL_REF)
267 old_label = set_src;
268 else if (GET_CODE (set_src) != IF_THEN_ELSE)
269 abort ();
270 else if (XEXP (set_src, 1) == pc_rtx)
271 old_label = XEXP (XEXP (set_src, 2), 0);
272 else
273 old_label = XEXP (XEXP (set_src, 1), 0);
274
275 /* Set the JUMP_LABEL so that redirect_jump will work. */
276 JUMP_LABEL (arcptr->branch_insn) = old_label;
277
278 /* Add a use for OLD_LABEL that will be needed when we emit
279 the JUMP_INSN below. If we don't do this here,
280 `invert_jump' might delete it for us. We must add two
281 when not optimizing, because the NUSES is zero now,
282 but must be at least two to prevent the label from being
283 deleted. */
284 LABEL_NUSES (old_label) += 2;
285
286 /* Emit the insns for the new block in reverse order,
287 since that is most convenient. */
288
289 if (this_is_simplejump)
290 {
291 after = NEXT_INSN (arcptr->branch_insn);
292 if (! redirect_jump (arcptr->branch_insn, new_label))
293 /* Don't know what to do if this branch won't
294 redirect. */
295 abort ();
296 }
297 else
298 {
299 if (! invert_jump (arcptr->branch_insn, new_label))
300 /* Don't know what to do if this branch won't invert. */
301 abort ();
302
303 emit_label_after (new_label, after);
304 LABEL_NUSES (new_label)++;
305 }
306 emit_barrier_after (after);
307 emit_jump_insn_after (gen_jump (old_label), after);
308 JUMP_LABEL (NEXT_INSN (after)) = old_label;
309
310 /* Instrument the source arc. */
311 output_arc_profiler (total_num_arcs_instrumented
312 + num_instr_arcs++,
313 after);
314 if (this_is_simplejump)
315 {
316 emit_label_after (new_label, after);
317 LABEL_NUSES (new_label)++;
318 }
319 }
320 else if (code == ADDR_VEC || code == ADDR_DIFF_VEC)
321 {
322 /* A table jump. Create a new basic block immediately
323 after the table, by emitting a barrier, a label, a
324 counting note, and a jump to the old label. Put the
325 new label in the table. */
326
327 rtx new_label = gen_label_rtx ();
328 rtx old_lref, new_lref;
329 int index;
330
331 /* Must determine the old_label reference, do this
332 by counting the arcs after this one, which will
333 give the index of our label in the table. */
334
335 index = 0;
336 for (backptr = arcptr->succ_next; backptr;
337 backptr = backptr->succ_next)
338 index++;
339
340 old_lref = XVECEXP (PATTERN (arcptr->branch_insn),
341 (code == ADDR_DIFF_VEC), index);
342
343 /* Emit the insns for the new block in reverse order,
344 since that is most convenient. */
345 emit_jump_insn_after (gen_jump (XEXP (old_lref, 0)),
346 arcptr->branch_insn);
347 JUMP_LABEL (NEXT_INSN (arcptr->branch_insn))
348 = XEXP (old_lref, 0);
349
350 /* Instrument the source arc. */
351 output_arc_profiler (total_num_arcs_instrumented
352 + num_instr_arcs++,
353 arcptr->branch_insn);
354
355 emit_label_after (new_label, arcptr->branch_insn);
356 LABEL_NUSES (NEXT_INSN (arcptr->branch_insn))++;
357 emit_barrier_after (arcptr->branch_insn);
358
359 /* Fix up the table jump. */
360		  new_lref = gen_rtx_LABEL_REF (Pmode, new_label);
361 XVECEXP (PATTERN (arcptr->branch_insn),
362 (code == ADDR_DIFF_VEC), index) = new_lref;
363 }
364 else
365 abort ();
366
367 num_arcs += 1;
368 if (dump_file)
369 fprintf (dump_file,
370 "Arc %d to %d needed new basic block\n", i,
371 ARC_TARGET (arcptr));
372 }
373 }
374
375 total_num_arcs_instrumented += num_instr_arcs;
376 count_instrumented_arcs = total_num_arcs_instrumented;
377
378 total_num_blocks_created += num_arcs;
379 if (dump_file)
380 {
381 fprintf (dump_file, "%d arcs instrumented\n", num_instr_arcs);
382 fprintf (dump_file, "%d extra basic blocks created\n", num_arcs);
383 }
384}
385
386/* Output STRING to bb_file, surrounded by DELIMITER. */
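
/* The record written is: DELIMITER as a 4-byte value, the NUL-terminated
   string padded with zero bytes up to a 4-byte boundary, and DELIMITER
   again, so that gcov can easily find the end of the name.  */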
387
388static void
389output_gcov_string (string, delimiter)
390 char *string;
391 long delimiter;
392{
393 long temp;
394
395 /* Write a delimiter to indicate that a file name follows. */
396 __write_long (delimiter, bb_file, 4);
397
398 /* Write the string. */
399 temp = strlen (string) + 1;
400 fwrite (string, temp, 1, bb_file);
401
402 /* Append a few zeros, to align the output to a 4 byte boundary. */
403 temp = temp & 0x3;
404 if (temp)
405 {
406 char c[4];
407
408 c[0] = c[1] = c[2] = c[3] = 0;
409 fwrite (c, sizeof (char), 4 - temp, bb_file);
410 }
411
412 /* Store another delimiter in the .bb file, just to make it easy to find the
413 end of the file name. */
414 __write_long (delimiter, bb_file, 4);
415}
416\f
417/* Return TRUE if this insn must be a tablejump entry insn. This works for
418 the MIPS port, but may give false negatives for some targets. */
419
420int
421tablejump_entry_p (insn, label)
422 rtx insn, label;
423{
424 rtx next = next_active_insn (insn);
425 enum rtx_code code = GET_CODE (PATTERN (next));
426
427 if (code != ADDR_DIFF_VEC && code != ADDR_VEC)
428 return 0;
429
430 if (PREV_INSN (next) == XEXP (label, 0))
431 return 1;
432
433 return 0;
434}
435
436/* Instrument and/or analyze program behavior based on program flow graph.
437 In either case, this function builds a flow graph for the function being
438 compiled. The flow graph is stored in BB_GRAPH.
439
440 When FLAG_PROFILE_ARCS is nonzero, this function instruments the arcs in
441 the flow graph that are needed to reconstruct the dynamic behavior of the
442 flow graph.
443
444   When FLAG_BRANCH_PROBABILITIES is nonzero, this function reads auxiliary
445 information from a data file containing arc count information from previous
446 executions of the function being compiled. In this case, the flow graph is
447 annotated with actual execution counts, which are later propagated into the
448 rtl for optimization purposes.
449
450 Main entry point of this file. */
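
/* Outline of the work done below: count the basic blocks and build the
   label_to_bb map; build the arc graph, adding fake arcs for setjmp calls,
   for calls that may not return, and from the last block back to block 0;
   pick a spanning tree; write the .bb/.bbg records when -ftest-coverage is
   given; insert counter code on the arcs not on the tree when -fprofile-arcs
   is given; and, when -fbranch-probabilities is given, read the .da counts,
   solve the graph, and attach REG_BR_PROB and REG_EXEC_COUNT notes.  */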
451
452void
453branch_prob (f, dump_file)
454 rtx f;
455 FILE *dump_file;
456{
457 int i, num_blocks;
458 struct adj_list *arcptr;
459 int num_arcs, changes, passes;
460 int total, prob;
461 int hist_br_prob[20], num_never_executed, num_branches;
462 /* Set to non-zero if we got bad count information. */
463 int bad_counts = 0;
464
465 /* start of a function. */
466 if (flag_test_coverage)
467 output_gcov_string (current_function_name, (long) -2);
468
469 /* Execute this only if doing arc profiling or branch probabilities. */
470 if (! profile_arc_flag && ! flag_branch_probabilities
471 && ! flag_test_coverage)
472 abort ();
473
474 total_num_times_called++;
475
476 /* Create an array label_to_bb of ints of size max_label_num. */
477 label_to_bb_size = max_label_num ();
478 label_to_bb = (int *) oballoc (label_to_bb_size * sizeof (int));
479 bzero ((char *) label_to_bb, label_to_bb_size * sizeof (int));
480
481 /* Scan the insns in the function, count the number of basic blocks
482 present. When a code label is passed, set label_to_bb[label] = bb
483 number. */
484
485 /* The first block found will be block 1, so that function entry can be
486 block 0. */
487
488 {
489 register RTX_CODE prev_code = JUMP_INSN;
490 register RTX_CODE code;
491 register rtx insn;
492 register int i;
493 int block_separator_emitted = 0;
494
495 ignore_next_note = 0;
496
497 for (insn = NEXT_INSN (f), i = 0; insn; insn = NEXT_INSN (insn))
498 {
499 code = GET_CODE (insn);
500
501 if (code == BARRIER)
502 ;
503 else if (code == CODE_LABEL)
504 /* This label is part of the next block, but we can't increment
505 block number yet since there might be multiple labels. */
506 label_to_bb[CODE_LABEL_NUMBER (insn)] = i + 1;
507 /* We make NOTE_INSN_SETJMP notes into a block of their own, so that
508 they can be the target of the fake arc for the setjmp call.
509 This avoids creating cycles of fake arcs, which would happen if
510 the block after the setjmp call contained a call insn. */
511 else if ((prev_code == JUMP_INSN || prev_code == CALL_INSN
512 || prev_code == CODE_LABEL || prev_code == BARRIER)
513 && (GET_RTX_CLASS (code) == 'i'
514 || (code == NOTE
515 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)))
516 {
517 i += 1;
518
519 /* Emit the block separator if it hasn't already been emitted. */
520 if (flag_test_coverage && ! block_separator_emitted)
521 {
522 /* Output a zero to the .bb file to indicate that a new
523 block list is starting. */
524 __write_long (0, bb_file, 4);
525 }
526 block_separator_emitted = 0;
527 }
528 /* If flag_test_coverage is true, then we must add an entry to the
529 .bb file for every note. */
530 else if (code == NOTE && flag_test_coverage)
531 {
532 /* Must ignore the line number notes that immediately follow the
533 end of an inline function to avoid counting it twice. There
534 is a note before the call, and one after the call. */
535 if (NOTE_LINE_NUMBER (insn) == NOTE_REPEATED_LINE_NUMBER)
536 ignore_next_note = 1;
537 else if (NOTE_LINE_NUMBER (insn) > 0)
538 {
539 if (ignore_next_note)
540 ignore_next_note = 0;
541 else
542 {
543 /* Emit a block separator here to ensure that a NOTE
544 immediately following a JUMP_INSN or CALL_INSN will end
545 up in the right basic block list. */
546 if ((prev_code == JUMP_INSN || prev_code == CALL_INSN
547 || prev_code == CODE_LABEL || prev_code == BARRIER)
548 && ! block_separator_emitted)
549 {
550 /* Output a zero to the .bb file to indicate that
551 a new block list is starting. */
552 __write_long (0, bb_file, 4);
553
554 block_separator_emitted = 1;
555 }
556
557 /* If this is a new source file, then output the file's
558 name to the .bb file. */
559 if (! last_bb_file_name
560 || strcmp (NOTE_SOURCE_FILE (insn),
561 last_bb_file_name))
562 {
563 if (last_bb_file_name)
564 free (last_bb_file_name);
565 last_bb_file_name
566 = xmalloc (strlen (NOTE_SOURCE_FILE (insn)) + 1);
567 strcpy (last_bb_file_name, NOTE_SOURCE_FILE (insn));
568 output_gcov_string (NOTE_SOURCE_FILE (insn), (long)-1);
569 }
570
571 /* Output the line number to the .bb file. Must be done
572 after the output_bb_profile_data() call, and after the
573 file name is written, to ensure that it is correctly
574 handled by gcov. */
575 __write_long (NOTE_LINE_NUMBER (insn), bb_file, 4);
576 }
577 }
578 }
579
580 if (code != NOTE)
581 prev_code = code;
582 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
583 prev_code = CALL_INSN;
584 }
585
586 /* Allocate last `normal' entry for bb_graph. */
587
588 /* The last insn was a jump, call, or label. In that case we have
589 a block at the end of the function with no insns. */
590 if (prev_code == JUMP_INSN || prev_code == CALL_INSN
591 || prev_code == CODE_LABEL || prev_code == BARRIER)
592 {
593 i++;
594
595 /* Emit the block separator if it hasn't already been emitted. */
596 if (flag_test_coverage && ! block_separator_emitted)
597 {
598 /* Output a zero to the .bb file to indicate that a new
599 block list is starting. */
600 __write_long (0, bb_file, 4);
601 }
602 }
603
604 /* Create another block to stand for EXIT, and make all return insns, and
605 the last basic block point here. Add one more to account for block
606 zero. */
607 num_blocks = i + 2;
608 }
609
610 total_num_blocks += num_blocks;
611 if (dump_file)
612 fprintf (dump_file, "%d basic blocks\n", num_blocks);
613
614 /* If we are only doing test coverage here, then return now. */
615 if (! profile_arc_flag && ! flag_branch_probabilities)
616 return;
617
618 /* Create and initialize the arrays that will hold bb_graph
619 and execution count info. */
620
621 bb_graph = (struct bb_info *) alloca (num_blocks * sizeof (struct bb_info));
622 bzero ((char *) bb_graph, (sizeof (struct bb_info) * num_blocks));
623
624 {
625 /* Scan the insns again:
626 - at the entry to each basic block, increment the predecessor count
627 (and successor of previous block) if it is a fall through entry,
628 create adj_list entries for this and the previous block
629 - at each jump insn, increment predecessor/successor counts for
630 target/source basic blocks, add this insn to pred/succ lists.
631
632 This also cannot be broken out as a separate subroutine
633 because it uses `alloca'. */
634
635 register RTX_CODE prev_code = JUMP_INSN;
636 register RTX_CODE code;
637 register rtx insn;
638 register int i;
639 int fall_through = 0;
640 struct adj_list *arcptr;
641    int dest = 0;
642
643 /* Block 0 always falls through to block 1. */
644 num_arcs = 0;
645 arcptr = (struct adj_list *) alloca (sizeof (struct adj_list));
646 init_arc (arcptr, 0, 1, 0);
647 arcptr->fall_through = 1;
648 num_arcs++;
649
650 /* Add a fake fall through arc from the last block to block 0, to make the
651 graph complete. */
652 arcptr = (struct adj_list *) alloca (sizeof (struct adj_list));
653 init_arc (arcptr, num_blocks - 1, 0, 0);
654 arcptr->fake = 1;
655 num_arcs++;
656
657    /* Exit must be one node of the graph, and all exits from the function
658       must point there.  When we see a return branch, we must point the arc
659       to the exit node.  */
660
661 /* Must start scan with second insn in function as above. */
662 for (insn = NEXT_INSN (f), i = 0; insn; insn = NEXT_INSN (insn))
663 {
664 code = GET_CODE (insn);
665
666 if (code == BARRIER)
667 fall_through = 0;
668 else if (code == CODE_LABEL)
669 ;
670 /* We make NOTE_INSN_SETJMP notes into a block of their own, so that
671 they can be the target of the fake arc for the setjmp call.
672 This avoids creating cycles of fake arcs, which would happen if
673 the block after the setjmp call ended with a call. */
674 else if ((prev_code == JUMP_INSN || prev_code == CALL_INSN
675 || prev_code == CODE_LABEL || prev_code == BARRIER)
676 && (GET_RTX_CLASS (code) == 'i'
677 || (code == NOTE
678 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)))
679 {
680 /* This is the first insn of the block. */
681 i += 1;
682 if (fall_through)
683 {
684 arcptr = (struct adj_list *) alloca (sizeof (struct adj_list));
685 init_arc (arcptr, i - 1, i, 0);
686 arcptr->fall_through = 1;
687
688 num_arcs++;
689 }
690 fall_through = 1;
691 bb_graph[i].first_insn = insn;
692 }
693 else if (code == NOTE)
694	    {;}
695
696 if (code == CALL_INSN)
697 {
698 /* In the normal case, the call returns, and this is just like
699 a branch fall through. */
700 fall_through = 1;
701
702 /* Setjmp may return more times than called, so to make the graph
703 solvable, add a fake arc from the function entrance to the
704 next block.
705
706	     All other functions may return fewer times than called (if
707	     a descendant calls longjmp or exit), so to make the graph
708 solvable, add a fake arc to the function exit from the
709 current block.
710
711	     Distinguish the cases by checking for a SETJMP note.
712	     A call_insn can be the last insn of a function, so we must check
713	     whether the next insn actually exists.  */
714 arcptr = (struct adj_list *) alloca (sizeof (struct adj_list));
715 if (NEXT_INSN (insn)
716 && GET_CODE (NEXT_INSN (insn)) == NOTE
717 && NOTE_LINE_NUMBER (NEXT_INSN (insn)) == NOTE_INSN_SETJMP)
718 init_arc (arcptr, 0, i+1, insn);
719 else
720 init_arc (arcptr, i, num_blocks-1, insn);
721 arcptr->fake = 1;
722 num_arcs++;
723 }
724 else if (code == JUMP_INSN)
725 {
726 rtx tem, pattern = PATTERN (insn);
727 rtx tablejump = 0;
728
729	  /* If running without optimization, the JUMP_LABEL won't be valid,
730	     so we must search for the destination label in that case.
731	     We have to handle tablejumps and returns specially anyway, so
732	     we don't check the JUMP_LABEL at all here.  */
733
734 /* ??? This code should be rewritten. We need a more elegant way
735 to find the LABEL_REF. We need a more elegant way to
736 differentiate tablejump entries from computed gotos.
737 We should perhaps reuse code from flow to compute the CFG
738 instead of trying to compute it here.
739
740 We can't use current_function_has_computed_jump, because that
741 is calculated later by flow. We can't use computed_jump_p,
742 because that returns true for tablejump entry insns for some
743 targets, e.g. HPPA and MIPS. */
744
745 if (GET_CODE (pattern) == PARALLEL)
746 {
747 /* This assumes that PARALLEL jumps with a USE are
748 tablejump entry jumps. The same assumption can be found
749 in computed_jump_p. */
750 /* Make an arc from this jump to the label of the
751 jump table. This will instrument the number of
752 times the switch statement is executed. */
753 if (GET_CODE (XVECEXP (pattern, 0, 1)) == USE)
754 {
755 tem = XEXP (XVECEXP (pattern, 0, 1), 0);
756 if (GET_CODE (tem) != LABEL_REF)
757 abort ();
758 dest = label_to_bb[CODE_LABEL_NUMBER (XEXP (tem, 0))];
759 }
760 else if (GET_CODE (XVECEXP (pattern, 0, 0)) == SET
761 && SET_DEST (XVECEXP (pattern, 0, 0)) == pc_rtx)
762 {
763 tem = SET_SRC (XVECEXP (pattern, 0, 0));
764 if (GET_CODE (tem) == PLUS
765 && GET_CODE (XEXP (tem, 1)) == LABEL_REF)
766 {
767 tem = XEXP (tem, 1);
768 dest = label_to_bb [CODE_LABEL_NUMBER (XEXP (tem, 0))];
769 }
770 }
771 else
772 abort ();
773 }
774 else if (GET_CODE (pattern) == ADDR_VEC
775 || GET_CODE (pattern) == ADDR_DIFF_VEC)
776 tablejump = pattern;
777 else if (GET_CODE (pattern) == RETURN)
778 dest = num_blocks - 1;
779 else if (GET_CODE (pattern) != SET)
780 abort ();
781 else if ((tem = SET_SRC (pattern))
782 && GET_CODE (tem) == LABEL_REF)
783 dest = label_to_bb[CODE_LABEL_NUMBER (XEXP (tem, 0))];
784 /* Recognize HPPA table jump entry. This code is similar to
785 the code above in the PARALLEL case. */
786 else if (GET_CODE (tem) == PLUS
787 && GET_CODE (XEXP (tem, 0)) == MEM
788 && GET_CODE (XEXP (XEXP (tem, 0), 0)) == PLUS
789 && GET_CODE (XEXP (XEXP (XEXP (tem, 0), 0), 0)) == PC
790 && GET_CODE (XEXP (tem, 1)) == LABEL_REF
791 && tablejump_entry_p (insn, XEXP (tem, 1)))
792 dest = label_to_bb[CODE_LABEL_NUMBER (XEXP (XEXP (tem, 1), 0))];
793 /* Recognize the MIPS table jump entry. */
794 else if (GET_CODE (tem) == PLUS
795 && GET_CODE (XEXP (tem, 0)) == REG
796 && GET_CODE (XEXP (tem, 1)) == LABEL_REF
797 && tablejump_entry_p (insn, XEXP (tem, 1)))
798	    dest = label_to_bb[CODE_LABEL_NUMBER (XEXP (XEXP (tem, 1), 0))];
799 else
800 {
801 rtx label_ref;
802
803	      /* Must be an IF_THEN_ELSE branch.  If it isn't, assume it
804		 is a computed goto, which isn't supported yet.  */
805	      if (GET_CODE (tem) != IF_THEN_ELSE)
806		fatal ("-fprofile-arcs does not support computed gotos");
807 if (XEXP (tem, 1) != pc_rtx)
808 label_ref = XEXP (tem, 1);
809 else
810 label_ref = XEXP (tem, 2);
811 dest = label_to_bb[CODE_LABEL_NUMBER (XEXP (label_ref, 0))];
812 }
813
814 if (tablejump)
815 {
816 int diff_vec_p = GET_CODE (tablejump) == ADDR_DIFF_VEC;
817 int len = XVECLEN (tablejump, diff_vec_p);
818 int k;
819
820 for (k = 0; k < len; k++)
821 {
822 rtx tem = XEXP (XVECEXP (tablejump, diff_vec_p, k), 0);
823 dest = label_to_bb[CODE_LABEL_NUMBER (tem)];
824
825 arcptr = (struct adj_list *) alloca (sizeof(struct adj_list));
826 init_arc (arcptr, i, dest, insn);
827
828 num_arcs++;
829 }
830 }
831 else
832 {
833 arcptr = (struct adj_list *) alloca (sizeof (struct adj_list));
834 init_arc (arcptr, i, dest, insn);
835
836 num_arcs++;
837 }
838
839 /* Determine whether or not this jump will fall through.
840 Unconditional jumps and returns are not always followed by
841 barriers. */
842 pattern = PATTERN (insn);
843 if (GET_CODE (pattern) == PARALLEL
844 || GET_CODE (pattern) == RETURN)
845 fall_through = 0;
846 else if (GET_CODE (pattern) == ADDR_VEC
847 || GET_CODE (pattern) == ADDR_DIFF_VEC)
848 /* These aren't actually jump insns, but they never fall
849 through, so... */
850 fall_through = 0;
851 else
852 {
853 if (GET_CODE (pattern) != SET || SET_DEST (pattern) != pc_rtx)
854 abort ();
855 if (GET_CODE (SET_SRC (pattern)) != IF_THEN_ELSE)
856 fall_through = 0;
857 }
858 }
859
860 if (code != NOTE)
861 prev_code = code;
862 else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
863 {
864 /* Make a fake insn to tag our notes on. */
865 bb_graph[i].first_insn = insn
866	      = emit_insn_after (gen_rtx_USE (VOIDmode, stack_pointer_rtx),
867 insn);
868 prev_code = CALL_INSN;
869 }
870 }
871
872 /* If the code at the end of the function would give a new block, then
873 do the following. */
874
875 if (prev_code == JUMP_INSN || prev_code == CALL_INSN
876 || prev_code == CODE_LABEL || prev_code == BARRIER)
877 {
878 if (fall_through)
879 {
880 arcptr = (struct adj_list *) alloca (sizeof (struct adj_list));
881 init_arc (arcptr, i, i + 1, 0);
882 arcptr->fall_through = 1;
883
884 num_arcs++;
885 }
886
887 /* This may not be a real insn, but that should not cause a problem. */
888 bb_graph[i+1].first_insn = get_last_insn ();
889 }
890
891 /* There is always a fake arc from the last block of the function
892 to the function exit block. */
893 arcptr = (struct adj_list *) alloca (sizeof (struct adj_list));
894 init_arc (arcptr, num_blocks-2, num_blocks-1, 0);
895 arcptr->fake = 1;
896 num_arcs++;
897 }
898
899 total_num_arcs += num_arcs;
900 if (dump_file)
901 fprintf (dump_file, "%d arcs\n", num_arcs);
902
903 /* Create spanning tree from basic block graph, mark each arc that is
904 on the spanning tree. */
905
906 /* To reduce the instrumentation cost, make two passes over the tree.
907 First, put as many must-split (crowded and fake) arcs on the tree as
908 possible, then on the second pass fill in the rest of the tree.
909 Note that the spanning tree is considered undirected, so that as many
910 must-split arcs as possible can be put on it.
911
912     Fallthrough arcs which are crowded should not be chosen on the first
913 pass, since they do not require creating a new basic block. These
914 arcs will have fall_through set. */
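
  /* Only the arcs left off the spanning tree receive counters; for a
     connected flow graph with N blocks and A arcs that is A - N + 1 arcs,
     since a spanning tree has N - 1 arcs.  The counts of the tree arcs are
     recovered later from the instrumented ones by flow conservation.  */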
915
916 find_spanning_tree (num_blocks);
917
918 /* Create a .bbg file from which gcov can reconstruct the basic block
919 graph. First output the number of basic blocks, and then for every
920 arc output the source and target basic block numbers.
921 NOTE: The format of this file must be compatible with gcov. */
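
  /* Concretely, the records written below are: the number of blocks, the
     number of arcs, then for each block its successor count followed by a
     (target block, flag word) pair per successor arc, where bit 0 of the
     flag word means on_tree, bit 1 fake, and bit 2 fall_through; a final
     -1 separates this list from the list of loop back edges.  */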
922
923 if (flag_test_coverage)
924 {
925 int flag_bits;
926
927 __write_long (num_blocks, bbg_file, 4);
928 __write_long (num_arcs, bbg_file, 4);
929
930 for (i = 0; i < num_blocks; i++)
931 {
932 long count = 0;
933 for (arcptr = bb_graph[i].succ; arcptr; arcptr = arcptr->succ_next)
934 count++;
935 __write_long (count, bbg_file, 4);
936
937 for (arcptr = bb_graph[i].succ; arcptr; arcptr = arcptr->succ_next)
938 {
939 flag_bits = 0;
940 if (arcptr->on_tree)
941 flag_bits |= 0x1;
942 if (arcptr->fake)
943 flag_bits |= 0x2;
944 if (arcptr->fall_through)
945 flag_bits |= 0x4;
946
947 __write_long (ARC_TARGET (arcptr), bbg_file, 4);
948 __write_long (flag_bits, bbg_file, 4);
949 }
950 }
951
952 /* Emit a -1 to separate the list of all arcs from the list of
953 loop back edges that follows. */
954 __write_long (-1, bbg_file, 4);
955 }
956
957 /* For each arc not on the spanning tree, add counting code as rtl. */
958
959 if (profile_arc_flag)
960 instrument_arcs (f, num_blocks, dump_file);
961
962 /* Execute the rest only if doing branch probabilities. */
963 if (! flag_branch_probabilities)
964 return;
965
966 /* For each arc not on the spanning tree, set its execution count from
967 the .da file. */
968
969 /* The first count in the .da file is the number of times that the function
970 was entered. This is the exec_count for block zero. */
971
972 num_arcs = 0;
973 for (i = 0; i < num_blocks; i++)
974 for (arcptr = bb_graph[i].succ; arcptr; arcptr = arcptr->succ_next)
975 if (! arcptr->on_tree)
976 {
977 num_arcs++;
978 if (da_file)
979 {
980 long value;
981 __read_long (&value, da_file, 8);
982 ARC_COUNT (arcptr) = value;
983 }
984 else
985 ARC_COUNT (arcptr) = 0;
986 arcptr->count_valid = 1;
987 bb_graph[i].succ_count--;
988 bb_graph[ARC_TARGET (arcptr)].pred_count--;
989 }
990
991 if (dump_file)
992 fprintf (dump_file, "%d arc counts read\n", num_arcs);
993
994 /* For every block in the file,
995 - if every exit/entrance arc has a known count, then set the block count
996 - if the block count is known, and every exit/entrance arc but one has
997 a known execution count, then set the count of the remaining arc
998
999 As arc counts are set, decrement the succ/pred count, but don't delete
1000 the arc, that way we can easily tell when all arcs are known, or only
1001 one arc is unknown. */
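
  /* For example, if a block is known to execute 10 times and two of its
     three outgoing arcs have known counts 3 and 4, the remaining arc must
     have count 10 - (3 + 4) = 3, and setting it may in turn make that arc's
     target block solvable.  */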
1002
1003 /* The order that the basic blocks are iterated through is important.
1004 Since the code that finds spanning trees starts with block 0, low numbered
1005 arcs are put on the spanning tree in preference to high numbered arcs.
1006 Hence, most instrumented arcs are at the end. Graph solving works much
1007 faster if we propagate numbers from the end to the start.
1008
1009 This takes an average of slightly more than 3 passes. */
1010
1011 changes = 1;
1012 passes = 0;
1013 while (changes)
1014 {
1015 passes++;
1016 changes = 0;
1017
1018 for (i = num_blocks - 1; i >= 0; i--)
1019 {
1020 struct bb_info *binfo = &bb_graph[i];
1021 if (! binfo->count_valid)
1022 {
1023 if (binfo->succ_count == 0)
1024 {
1025 total = 0;
1026 for (arcptr = binfo->succ; arcptr;
1027 arcptr = arcptr->succ_next)
1028 total += ARC_COUNT (arcptr);
1029 binfo->exec_count = total;
1030 binfo->count_valid = 1;
1031 changes = 1;
1032 }
1033 else if (binfo->pred_count == 0)
1034 {
1035 total = 0;
1036 for (arcptr = binfo->pred; arcptr;
1037 arcptr = arcptr->pred_next)
1038 total += ARC_COUNT (arcptr);
1039 binfo->exec_count = total;
1040 binfo->count_valid = 1;
1041 changes = 1;
1042 }
1043 }
1044 if (binfo->count_valid)
1045 {
1046 if (binfo->succ_count == 1)
1047 {
1048 total = 0;
1049 /* One of the counts will be invalid, but it is zero,
1050 so adding it in also doesn't hurt. */
1051 for (arcptr = binfo->succ; arcptr;
1052 arcptr = arcptr->succ_next)
1053 total += ARC_COUNT (arcptr);
1054 /* Calculate count for remaining arc by conservation. */
1055 total = binfo->exec_count - total;
1056 /* Search for the invalid arc, and set its count. */
1057 for (arcptr = binfo->succ; arcptr;
1058 arcptr = arcptr->succ_next)
1059 if (! arcptr->count_valid)
1060 break;
1061 if (! arcptr)
1062 abort ();
1063 arcptr->count_valid = 1;
1064 ARC_COUNT (arcptr) = total;
1065 binfo->succ_count--;
1066
1067 bb_graph[ARC_TARGET (arcptr)].pred_count--;
1068 changes = 1;
1069 }
1070 if (binfo->pred_count == 1)
1071 {
1072 total = 0;
1073 /* One of the counts will be invalid, but it is zero,
1074 so adding it in also doesn't hurt. */
1075 for (arcptr = binfo->pred; arcptr;
1076 arcptr = arcptr->pred_next)
1077 total += ARC_COUNT (arcptr);
1078 /* Calculate count for remaining arc by conservation. */
1079 total = binfo->exec_count - total;
1080 /* Search for the invalid arc, and set its count. */
1081 for (arcptr = binfo->pred; arcptr;
1082 arcptr = arcptr->pred_next)
1083 if (! arcptr->count_valid)
1084 break;
1085 if (! arcptr)
1086 abort ();
1087 arcptr->count_valid = 1;
1088 ARC_COUNT (arcptr) = total;
1089 binfo->pred_count--;
1090
1091 bb_graph[ARC_SOURCE (arcptr)].succ_count--;
1092 changes = 1;
1093 }
1094 }
1095 }
1096 }
1097
1098 total_num_passes += passes;
1099 if (dump_file)
1100 fprintf (dump_file, "Graph solving took %d passes.\n\n", passes);
1101
1102 /* If the graph has been correctly solved, every block will have a
1103 succ and pred count of zero. */
1104 for (i = 0; i < num_blocks; i++)
1105 {
1106 struct bb_info *binfo = &bb_graph[i];
1107 if (binfo->succ_count || binfo->pred_count)
1108 abort ();
1109 }
1110
1111 /* For every arc, calculate its branch probability and add a reg_note
1112 to the branch insn to indicate this. */
1113
1114 for (i = 0; i < 20; i++)
1115 hist_br_prob[i] = 0;
1116 num_never_executed = 0;
1117 num_branches = 0;
1118
1119 for (i = 0; i < num_blocks; i++)
1120 {
1121 struct bb_info *binfo = &bb_graph[i];
1122
1123 total = binfo->exec_count;
1124 for (arcptr = binfo->succ; arcptr; arcptr = arcptr->succ_next)
1125 {
1126 if (arcptr->branch_insn)
1127 {
1128 /* This calculates the branch probability as an integer between
1129 0 and REG_BR_PROB_BASE, properly rounded to the nearest
1130 integer. Perform the arithmetic in double to avoid
1131 overflowing the range of ints. */
1132
1133 if (total == 0)
1134 prob = -1;
1135 else
1136 {
1137 rtx pat = PATTERN (arcptr->branch_insn);
1138
1139 prob = (((double)ARC_COUNT (arcptr) * REG_BR_PROB_BASE)
1140 + (total >> 1)) / total;
1141 if (prob < 0 || prob > REG_BR_PROB_BASE)
1142 {
1143 if (dump_file)
1144 fprintf (dump_file, "bad count: prob for %d-%d thought to be %d (forcibly normalized)\n",
1145 ARC_SOURCE (arcptr), ARC_TARGET (arcptr),
1146 prob);
1147
1148 bad_counts = 1;
1149 prob = REG_BR_PROB_BASE / 2;
1150 }
1151
1152 /* Match up probability with JUMP pattern. */
1153
1154 if (GET_CODE (pat) == SET
1155 && GET_CODE (SET_SRC (pat)) == IF_THEN_ELSE)
1156 {
1157 if (ARC_TARGET (arcptr) == ARC_SOURCE (arcptr) + 1)
1158 {
1159 /* A fall through arc should never have a
1160 branch insn. */
1161 abort ();
1162 }
1163 else
1164 {
1165 /* This is the arc for the taken branch. */
1166 if (GET_CODE (XEXP (SET_SRC (pat), 2)) != PC)
1167 prob = REG_BR_PROB_BASE - prob;
1168 }
1169 }
1170 }
1171
1172 if (prob == -1)
1173 num_never_executed++;
1174 else
1175 {
1176 int index = prob * 20 / REG_BR_PROB_BASE;
1177 if (index == 20)
1178 index = 19;
1179 hist_br_prob[index]++;
1180 }
1181 num_branches++;
1182
1183 REG_NOTES (arcptr->branch_insn)
1184 = gen_rtx_EXPR_LIST (REG_BR_PROB, GEN_INT (prob),
1185 REG_NOTES (arcptr->branch_insn));
1186 }
1187 }
1188
1189 /* Add a REG_EXEC_COUNT note to the first instruction of this block. */
1190 if (! binfo->first_insn
1191 || GET_RTX_CLASS (GET_CODE (binfo->first_insn)) != 'i')
1192 {
1193 /* Block 0 is a fake block representing function entry, and does
1194 not have a real first insn. The second last block might not
1195 begin with a real insn. */
1196 if (i == num_blocks - 1)
1197 return_label_execution_count = total;
1198 else if (i != 0 && i != num_blocks - 2)
1199 abort ();
1200 }
1201 else
1202 {
1203 REG_NOTES (binfo->first_insn)
1204 = gen_rtx_EXPR_LIST (REG_EXEC_COUNT, GEN_INT (total),
1205 REG_NOTES (binfo->first_insn));
1206 if (i == num_blocks - 1)
1207 return_label_execution_count = total;
1208 }
1209 }
1210
1211 /* This should never happen. */
1212 if (bad_counts)
1213 warning ("Arc profiling: some arc counts were bad.");
1214
1215 if (dump_file)
1216 {
1217 fprintf (dump_file, "%d branches\n", num_branches);
1218 fprintf (dump_file, "%d branches never executed\n",
1219 num_never_executed);
1220 if (num_branches)
1221 for (i = 0; i < 10; i++)
1222 fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
1223 (hist_br_prob[i]+hist_br_prob[19-i])*100/num_branches,
1224 5*i, 5*i+5);
1225
1226 total_num_branches += num_branches;
1227 total_num_never_executed += num_never_executed;
1228 for (i = 0; i < 20; i++)
1229 total_hist_br_prob[i] += hist_br_prob[i];
1230 }
1231
1232}
1233\f
1234/* Initialize a new arc.
1235 ARCPTR is the empty adj_list this function fills in.
1236 SOURCE is the block number of the source block.
1237 TARGET is the block number of the target block.
1238 INSN is the insn which transfers control from SOURCE to TARGET,
1239 or zero if the transfer is implicit. */
1240
1241static void
1242init_arc (arcptr, source, target, insn)
1243 struct adj_list *arcptr;
1244 int source, target;
1245 rtx insn;
1246{
1247 ARC_TARGET (arcptr) = target;
1248 ARC_SOURCE (arcptr) = source;
1249
1250 ARC_COUNT (arcptr) = 0;
1251 arcptr->count_valid = 0;
1252 arcptr->on_tree = 0;
1253 arcptr->fake = 0;
1254 arcptr->fall_through = 0;
1255 arcptr->branch_insn = insn;
1256
1257 arcptr->succ_next = bb_graph[source].succ;
1258 bb_graph[source].succ = arcptr;
1259 bb_graph[source].succ_count++;
1260
1261 arcptr->pred_next = bb_graph[target].pred;
1262 bb_graph[target].pred = arcptr;
1263 bb_graph[target].pred_count++;
1264}
1265
1266/* This function searches all of the arcs in the program flow graph, and puts
1267 as many bad arcs as possible onto the spanning tree. Bad arcs include
1268 fake arcs (needed for setjmp(), longjmp(), exit()) which MUST be on the
1269 spanning tree as they can't be instrumented. Also, arcs which must be
1270 split when instrumented should be part of the spanning tree if possible. */
1271
1272static void
1273find_spanning_tree (num_blocks)
1274 int num_blocks;
1275{
1276 int i;
1277 struct adj_list *arcptr;
1278 struct bb_info *binfo = &bb_graph[0];
1279
1280 /* Fake arcs must be part of the spanning tree, and are always safe to put
1281 on the spanning tree. Fake arcs will either be a successor of node 0,
1282 a predecessor of the last node, or from the last node to node 0. */
1283
1284 for (arcptr = bb_graph[0].succ; arcptr; arcptr = arcptr->succ_next)
1285 if (arcptr->fake)
1286 {
1287 /* Adding this arc should never cause a cycle. This is a fatal
1288 error if it would. */
1289 if (bb_graph[ARC_TARGET (arcptr)].on_tree && binfo->on_tree)
1290 abort();
1291 else
1292 {
1293 arcptr->on_tree = 1;
1294 bb_graph[ARC_TARGET (arcptr)].on_tree = 1;
1295 binfo->on_tree = 1;
1296 }
1297 }
1298
1299 binfo = &bb_graph[num_blocks-1];
1300 for (arcptr = binfo->pred; arcptr; arcptr = arcptr->pred_next)
1301 if (arcptr->fake)
1302 {
1303 /* Adding this arc should never cause a cycle. This is a fatal
1304 error if it would. */
1305 if (bb_graph[ARC_SOURCE (arcptr)].on_tree && binfo->on_tree)
1306 abort();
1307 else
1308 {
1309 arcptr->on_tree = 1;
1310 bb_graph[ARC_SOURCE (arcptr)].on_tree = 1;
1311 binfo->on_tree = 1;
1312 }
1313 }
1314  /* The only entrance to node zero is a fake arc.  */
1315 bb_graph[0].pred->on_tree = 1;
1316
1317  /* Arcs which are crowded at both the source and target should be put on
1318     the spanning tree if possible, except for fall_through arcs, which never
1319     require adding a new block even if crowded, and arcs with the same source
1320     and dest, which must always be instrumented.  */
1321 for (i = 0; i < num_blocks; i++)
1322 {
1323 binfo = &bb_graph[i];
1324
1325 for (arcptr = binfo->succ; arcptr; arcptr = arcptr->succ_next)
1326 if (! ((binfo->succ == arcptr && arcptr->succ_next == 0)
1327 || (bb_graph[ARC_TARGET (arcptr)].pred
1328 && arcptr->pred_next == 0))
1329 && ! arcptr->fall_through
1330 && ARC_TARGET (arcptr) != i)
1331 {
1332 /* This is a crowded arc at both source and target. Try to put
1333 in on the spanning tree. Can do this if either the source or
1334 target block is not yet on the tree. */
1335 if (! bb_graph[ARC_TARGET (arcptr)].on_tree || ! binfo->on_tree)
1336 {
1337 arcptr->on_tree = 1;
1338 bb_graph[ARC_TARGET (arcptr)].on_tree = 1;
1339 binfo->on_tree = 1;
1340 }
1341 }
1342 }
1343
1344 /* Clear all of the basic block on_tree bits, so that we can use them to
1345 create the spanning tree. */
1346 for (i = 0; i < num_blocks; i++)
1347 bb_graph[i].on_tree = 0;
1348
1349 /* Now fill in the spanning tree until every basic block is on it.
1350 Don't put the 0 to 1 fall through arc on the tree, since it is
1351 always cheap to instrument, so start filling the tree from node 1. */
1352
1353 for (i = 1; i < num_blocks; i++)
1354 for (arcptr = bb_graph[i].succ; arcptr; arcptr = arcptr->succ_next)
1355 if (! arcptr->on_tree
1356 && ! bb_graph[ARC_TARGET (arcptr)].on_tree)
1357 {
1358 fill_spanning_tree (i);
1359 break;
1360 }
1361}
1362
1363/* Add arcs reached from BLOCK to the spanning tree if they are needed and
1364 not already there. */
1365
1366static void
1367fill_spanning_tree (block)
1368 int block;
1369{
1370 struct adj_list *arcptr;
1371
1372 expand_spanning_tree (block);
1373
1374 for (arcptr = bb_graph[block].succ; arcptr; arcptr = arcptr->succ_next)
1375 if (! arcptr->on_tree
1376 && ! bb_graph[ARC_TARGET (arcptr)].on_tree)
1377 {
1378 arcptr->on_tree = 1;
1379 fill_spanning_tree (ARC_TARGET (arcptr));
1380 }
1381}
1382
1383/* When we first visit a block, we must add to the spanning tree all blocks
1384   that are already connected to this block via tree arcs.  */
1385
1386static void
1387expand_spanning_tree (block)
1388 int block;
1389{
1390 struct adj_list *arcptr;
1391
1392 bb_graph[block].on_tree = 1;
1393
1394 for (arcptr = bb_graph[block].succ; arcptr; arcptr = arcptr->succ_next)
1395 if (arcptr->on_tree && ! bb_graph[ARC_TARGET (arcptr)].on_tree)
1396 expand_spanning_tree (ARC_TARGET (arcptr));
1397
1398 for (arcptr = bb_graph[block].pred;
1399 arcptr; arcptr = arcptr->pred_next)
1400 if (arcptr->on_tree && ! bb_graph[ARC_SOURCE (arcptr)].on_tree)
1401 expand_spanning_tree (ARC_SOURCE (arcptr));
1402}
1403\f
1404/* Perform file-level initialization for branch-prob processing. */
1405
1406void
1407init_branch_prob (filename)
1408 char *filename;
1409{
1410 long len;
1411 int i;
1412
1413 if (flag_test_coverage)
1414 {
1415 /* Open an output file for the basic block/line number map. */
1416 int len = strlen (filename);
1417 char *data_file = (char *) alloca (len + 4);
1418 strcpy (data_file, filename);
1419 strip_off_ending (data_file, len);
1420 strcat (data_file, ".bb");
1421 if ((bb_file = fopen (data_file, "w")) == 0)
1422 pfatal_with_name (data_file);
1423
1424 /* Open an output file for the program flow graph. */
1425 len = strlen (filename);
1426 bbg_file_name = (char *) alloca (len + 5);
1427 strcpy (bbg_file_name, filename);
1428 strip_off_ending (bbg_file_name, len);
1429 strcat (bbg_file_name, ".bbg");
1430 if ((bbg_file = fopen (bbg_file_name, "w")) == 0)
1431 pfatal_with_name (bbg_file_name);
1432
1433 /* Initialize to zero, to ensure that the first file name will be
1434 written to the .bb file. */
1435 last_bb_file_name = 0;
1436 }
1437
1438 if (flag_branch_probabilities)
1439 {
1440 len = strlen (filename);
1441 da_file_name = (char *) alloca (len + 4);
1442 strcpy (da_file_name, filename);
1443 strip_off_ending (da_file_name, len);
1444 strcat (da_file_name, ".da");
1445 if ((da_file = fopen (da_file_name, "r")) == 0)
1446 warning ("file %s not found, execution counts assumed to be zero.",
1447 da_file_name);
1448
1449 /* The first word in the .da file gives the number of instrumented arcs,
1450 which is not needed for our purposes. */
1451
1452 if (da_file)
1453 __read_long (&len, da_file, 8);
1454 }
1455
1456 if (profile_arc_flag)
1457 init_arc_profiler ();
1458
1459 total_num_blocks = 0;
1460 total_num_arcs = 0;
1461 total_num_arcs_instrumented = 0;
1462 total_num_blocks_created = 0;
1463 total_num_passes = 0;
1464 total_num_times_called = 0;
1465 total_num_branches = 0;
1466 total_num_never_executed = 0;
1467 for (i = 0; i < 20; i++)
1468 total_hist_br_prob[i] = 0;
1469}
1470
1471/* Performs file-level cleanup after branch-prob processing
1472 is completed. */
1473
1474void
1475end_branch_prob (dump_file)
1476 FILE *dump_file;
1477{
1478 if (flag_test_coverage)
1479 {
1480 fclose (bb_file);
1481 fclose (bbg_file);
1482 }
1483
1484 if (flag_branch_probabilities)
1485 {
1486 if (da_file)
1487 {
1488 long temp;
1489 /* This seems slightly dangerous, as it presumes the EOF
1490 flag will not be set until an attempt is made to read
1491 past the end of the file. */
1492 if (feof (da_file))
1493 warning (".da file contents exhausted too early\n");
1494 /* Should be at end of file now. */
1495 if (__read_long (&temp, da_file, 8) == 0)
1496 warning (".da file contents not exhausted\n");
1497 fclose (da_file);
1498 }
1499 }
1500
1501 if (dump_file)
1502 {
1503 fprintf (dump_file, "\n");
1504 fprintf (dump_file, "Total number of blocks: %d\n", total_num_blocks);
1505 fprintf (dump_file, "Total number of arcs: %d\n", total_num_arcs);
1506 fprintf (dump_file, "Total number of instrumented arcs: %d\n",
1507 total_num_arcs_instrumented);
1508 fprintf (dump_file, "Total number of blocks created: %d\n",
1509 total_num_blocks_created);
1510 fprintf (dump_file, "Total number of graph solution passes: %d\n",
1511 total_num_passes);
1512 if (total_num_times_called != 0)
1513 fprintf (dump_file, "Average number of graph solution passes: %d\n",
1514 (total_num_passes + (total_num_times_called >> 1))
1515 / total_num_times_called);
1516 fprintf (dump_file, "Total number of branches: %d\n", total_num_branches);
1517 fprintf (dump_file, "Total number of branches never executed: %d\n",
1518 total_num_never_executed);
1519 if (total_num_branches)
1520 {
1521 int i;
1522
1523 for (i = 0; i < 10; i++)
1524 fprintf (dump_file, "%d%% branches in range %d-%d%%\n",
1525 (total_hist_br_prob[i] + total_hist_br_prob[19-i]) * 100
1526 / total_num_branches, 5*i, 5*i+5);
1527 }
1528 }
1529}
1530\f
1531/* The label used by the arc profiling code. */
1532
1533static rtx profiler_label;
1534
1535/* Initialize the profiler_label. */
1536
1537static void
1538init_arc_profiler ()
1539{
1540 /* Generate and save a copy of this so it can be shared. */
1541 char *name = xmalloc (20);
1542 ASM_GENERATE_INTERNAL_LABEL (name, "LPBX", 2);
1543  profiler_label = gen_rtx_SYMBOL_REF (Pmode, name);
1544}
1545
1546/* Output instructions as RTL to increment the arc execution count. */
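
/* The emitted sequence is equivalent to incrementing element ARCNO of the
   array of longs that lives at the internal ("LPBX", 2) label set up by
   init_arc_profiler: the counter's address is loaded into a pseudo, the
   counter is loaded, incremented by one, and stored back.  */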
1547
1548static void
1549output_arc_profiler (arcno, insert_after)
1550 int arcno;
1551 rtx insert_after;
1552{
1553 rtx profiler_target_addr
1554 = (arcno
1555 ? gen_rtx_CONST (Pmode,
1556 gen_rtx_PLUS (Pmode, profiler_label,
1557 GEN_INT (LONG_TYPE_SIZE / BITS_PER_UNIT * arcno)))
1558 : profiler_label);
1559 enum machine_mode mode = mode_for_size (LONG_TYPE_SIZE, MODE_INT, 0);
1560 rtx profiler_reg = gen_reg_rtx (mode);
1561 rtx address_reg = gen_reg_rtx (Pmode);
1562 rtx mem_ref, add_ref;
1563 rtx sequence;
1564
1565 /* In this case, reload can use explicitly mentioned hard registers for
1566 reloads. It is not safe to output profiling code between a call
1567 and the instruction that copies the result to a pseudo-reg. This
1568 is because reload may allocate one of the profiling code pseudo-regs
1569 to the return value reg, thus clobbering the return value. So we
1570 must check for calls here, and emit the profiling code after the
1571 instruction that uses the return value, if any.
1572
1573 ??? The code here performs the same tests that reload does so hopefully
1574 all the bases are covered. */
1575
1576 if (SMALL_REGISTER_CLASSES
1577 && GET_CODE (insert_after) == CALL_INSN
1578 && (GET_CODE (PATTERN (insert_after)) == SET
1579 || (GET_CODE (PATTERN (insert_after)) == PARALLEL
1580 && GET_CODE (XVECEXP (PATTERN (insert_after), 0, 0)) == SET)))
1581 {
1582 rtx return_reg;
1583 rtx next_insert_after = next_nonnote_insn (insert_after);
1584
1585      /* The first insn after the call may be a stack pop, skip it.  */
1586 if (next_insert_after
1587 && GET_CODE (next_insert_after) == INSN
1588 && GET_CODE (PATTERN (next_insert_after)) == SET
1589 && SET_DEST (PATTERN (next_insert_after)) == stack_pointer_rtx)
1590 next_insert_after = next_nonnote_insn (next_insert_after);
1591
1592 if (next_insert_after
1593 && GET_CODE (next_insert_after) == INSN)
1594	{
1595 if (GET_CODE (PATTERN (insert_after)) == SET)
1596 return_reg = SET_DEST (PATTERN (insert_after));
1597 else
1598 return_reg = SET_DEST (XVECEXP (PATTERN (insert_after), 0, 0));
1599
1600 /* Now, NEXT_INSERT_AFTER may be an instruction that uses the
1601 return value. However, it could also be something else,
1602 like a CODE_LABEL, so check that the code is INSN. */
1603 if (next_insert_after != 0
1604 && GET_RTX_CLASS (GET_CODE (next_insert_after)) == 'i'
1605 && reg_referenced_p (return_reg, PATTERN (next_insert_after)))
1606 insert_after = next_insert_after;
1607 }
1608 }
1609
1610 start_sequence ();
1611
1612 emit_move_insn (address_reg, profiler_target_addr);
1613  mem_ref = gen_rtx_MEM (mode, address_reg);
1614 emit_move_insn (profiler_reg, mem_ref);
1615
1616  add_ref = gen_rtx_PLUS (mode, profiler_reg, GEN_INT (1));
1617 emit_move_insn (profiler_reg, add_ref);
1618
1619 /* This is the same rtx as above, but it is not legal to share this rtx. */
1620  mem_ref = gen_rtx_MEM (mode, address_reg);
1621 emit_move_insn (mem_ref, profiler_reg);
1622
1623 sequence = gen_sequence ();
1624 end_sequence ();
1625 emit_insn_after (sequence, insert_after);
1626}
1627
1628/* Output code for a constructor that will invoke __bb_init_func, if
1629 this has not already been done. */
1630
1631void
1632output_func_start_profiler ()
1633{
1634 tree fnname, fndecl;
1635 char *name, *cfnname;
1636 rtx table_address;
1637 enum machine_mode mode = mode_for_size (LONG_TYPE_SIZE, MODE_INT, 0);
1638  int save_flag_inline_functions = flag_inline_functions;
1639
1640 /* It's either already been output, or we don't need it because we're
1641 not doing profile-arcs. */
1642 if (! need_func_profiler)
1643 return;
1644
1645 need_func_profiler = 0;
1646
1647 /* Synthesize a constructor function to invoke __bb_init_func with a
1648 pointer to this object file's profile block. */
1649 start_sequence ();
1650
1651 /* Try and make a unique name given the "file function name".
1652
1653 And no, I don't like this either. */
1654
1655 fnname = get_file_function_name ('I');
1656 cfnname = IDENTIFIER_POINTER (fnname);
1657 name = xmalloc (strlen (cfnname) + 5);
1658 sprintf (name, "%sGCOV",cfnname);
1659 fnname = get_identifier (name);
1660 free (name);
1661
1662 fndecl = build_decl (FUNCTION_DECL, fnname,
1663 build_function_type (void_type_node, NULL_TREE));
1664  DECL_EXTERNAL (fndecl) = 0;
1665 TREE_PUBLIC (fndecl) = 1;
1666 DECL_ASSEMBLER_NAME (fndecl) = fnname;
1667 DECL_RESULT (fndecl) = build_decl (RESULT_DECL, NULL_TREE, void_type_node);
1668 current_function_decl = fndecl;
1669 pushlevel (0);
1670 make_function_rtl (fndecl);
1671 init_function_start (fndecl, input_filename, lineno);
1672 expand_function_start (fndecl, 0);
1673
1674 /* Actually generate the code to call __bb_init_func. */
1675 name = xmalloc (20);
1676 ASM_GENERATE_INTERNAL_LABEL (name, "LPBX", 0);
1677 table_address = force_reg (Pmode, gen_rtx_SYMBOL_REF (Pmode, name));
1678 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__bb_init_func"), 0,
1679 mode, 1, table_address, Pmode);
1680
1681 expand_function_end (input_filename, lineno, 0);
1682 poplevel (1, 0, 1);
1683
1684 /* Since fndecl isn't in the list of globals, it would never be emitted
1685 when it's considered to be 'safe' for inlining, so turn off
1686 flag_inline_functions. */
1687 flag_inline_functions = 0;
1688
1689  rest_of_compilation (fndecl);
1690
1691 /* Reset flag_inline_functions to its original value. */
1692 flag_inline_functions = save_flag_inline_functions;
1693
1694 if (! quiet_flag)
1695 fflush (asm_out_file);
1696 current_function_decl = NULL_TREE;
1697
1698 assemble_constructor (IDENTIFIER_POINTER (DECL_NAME (fndecl)));
1699}