1 /* Transformations based on profile information for values.
2 Copyright (C) 2003-2020 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "cfghooks.h"
28 #include "ssa.h"
29 #include "cgraph.h"
30 #include "coverage.h"
31 #include "data-streamer.h"
32 #include "diagnostic.h"
33 #include "fold-const.h"
34 #include "tree-nested.h"
35 #include "calls.h"
36 #include "expr.h"
37 #include "value-prof.h"
38 #include "tree-eh.h"
39 #include "gimplify.h"
40 #include "gimple-iterator.h"
41 #include "tree-cfg.h"
42 #include "gimple-pretty-print.h"
43 #include "dumpfile.h"
44 #include "builtins.h"
45
46 /* This file implements value-profile-based optimizations.  Currently the
47 following optimizations are implemented (for more detailed descriptions
48 see the comments at gimple_value_profile_transformations):
49
50 1) Division/modulo specialization.  Provided that we can determine that the
51 operands of the division have some special properties, we may use this
52 information to produce more effective code.
53
54 2) Indirect/virtual call specialization.  If we can determine the most
55 common callee of an indirect/virtual call, we can use this
56 information to improve code effectiveness (especially as info for
57 the inliner).
58
59 3) Speculative prefetching. If we are able to determine that the difference
60 between addresses accessed by a memory reference is usually constant, we
61 may add the prefetch instructions.
62 FIXME: This transformation was removed together with RTL based value
63 profiling.
64
65
66 Value profiling internals
67 ==========================
68
69 Every value profiling transformation starts with defining what values
70 to profile. There are different histogram types (see HIST_TYPE_* in
71 value-prof.h) and each transformation can request one or more histogram
72 types per GIMPLE statement. The function gimple_find_values_to_profile()
73 collects the values to profile in a vec, and adds the number of counters
74 required for the different histogram types.
75
76 For a -fprofile-generate run, the statements for which values should be
77 recorded, are instrumented in instrument_values(). The instrumentation
78 is done by helper functions that can be found in tree-profile.c, where
79 new types of histograms can be added if necessary.
80
81 For a -fprofile-use run, the value profiling data is read back in by
82 compute_value_histograms(), which translates the collected data into
83 histograms and attaches them to the profiled statements via
84 gimple_add_histogram_value().  Histograms are stored in a hash table
85 that is attached to every instrumented function, see VALUE_HISTOGRAMS
86 in function.h.
87
88 The value-profile transformations driver is the function
89 gimple_value_profile_transformations(). It traverses all statements in
90 the to-be-transformed function, and looks for statements with one or
91 more histograms attached to them.  If a statement has histograms, the
92 transformation functions are called on the statement.
93
94 Limitations / FIXME / TODO:
95 * Only one histogram of each type can be associated with a statement.
96 * Some value profile transformations are done in builtins.c (?!)
97 * Updating of histograms needs some TLC.
98 * The value profiling code could be used to record analysis results
99 from non-profiling (e.g. VRP).
100 * Adding new profilers should be simplified, starting with a cleanup
101 of what-happens-where and with making gimple_find_values_to_profile
102 and gimple_value_profile_transformations table-driven, perhaps...
103 */
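/* As an illustrative sketch only (not generated code; the real
   transformations build GIMPLE statements and fix up the CFG
   explicitly): if value profiling shows that the divisor of

       r = a % b;

   is 16 in most executions, transformation 1) effectively produces

       if (b == 16)
         r = a % 16;    (divisor now constant; expandable as a mask)
       else
         r = a % b;

   with edge probabilities taken from the profile.  */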
104
105 static bool gimple_divmod_fixed_value_transform (gimple_stmt_iterator *);
106 static bool gimple_mod_pow2_value_transform (gimple_stmt_iterator *);
107 static bool gimple_mod_subtract_transform (gimple_stmt_iterator *);
108 static bool gimple_stringops_transform (gimple_stmt_iterator *);
109 static void dump_ic_profile (gimple_stmt_iterator *gsi);
110
111 /* Allocate histogram value. */
112
113 histogram_value
114 gimple_alloc_histogram_value (struct function *fun ATTRIBUTE_UNUSED,
115 enum hist_type type, gimple *stmt, tree value)
116 {
117 histogram_value hist = (histogram_value) xcalloc (1, sizeof (*hist));
118 hist->hvalue.value = value;
119 hist->hvalue.stmt = stmt;
120 hist->type = type;
121 return hist;
122 }
123
124 /* Hash value for histogram. */
125
126 static hashval_t
127 histogram_hash (const void *x)
128 {
129 return htab_hash_pointer (((const_histogram_value)x)->hvalue.stmt);
130 }
131
132 /* Return nonzero if statement for histogram_value X is Y. */
133
134 static int
135 histogram_eq (const void *x, const void *y)
136 {
137 return ((const_histogram_value) x)->hvalue.stmt == (const gimple *) y;
138 }
139
140 /* Set histogram for STMT. */
141
142 static void
143 set_histogram_value (struct function *fun, gimple *stmt, histogram_value hist)
144 {
145 void **loc;
146 if (!hist && !VALUE_HISTOGRAMS (fun))
147 return;
148 if (!VALUE_HISTOGRAMS (fun))
149 VALUE_HISTOGRAMS (fun) = htab_create (1, histogram_hash,
150 histogram_eq, NULL);
151 loc = htab_find_slot_with_hash (VALUE_HISTOGRAMS (fun), stmt,
152 htab_hash_pointer (stmt),
153 hist ? INSERT : NO_INSERT);
154 if (!hist)
155 {
156 if (loc)
157 htab_clear_slot (VALUE_HISTOGRAMS (fun), loc);
158 return;
159 }
160 *loc = hist;
161 }
162
163 /* Get histogram list for STMT. */
164
165 histogram_value
166 gimple_histogram_value (struct function *fun, gimple *stmt)
167 {
168 if (!VALUE_HISTOGRAMS (fun))
169 return NULL;
170 return (histogram_value) htab_find_with_hash (VALUE_HISTOGRAMS (fun), stmt,
171 htab_hash_pointer (stmt));
172 }
173
174 /* Add histogram for STMT. */
175
176 void
177 gimple_add_histogram_value (struct function *fun, gimple *stmt,
178 histogram_value hist)
179 {
180 hist->hvalue.next = gimple_histogram_value (fun, stmt);
181 set_histogram_value (fun, stmt, hist);
182 hist->fun = fun;
183 }
184
185 /* Remove histogram HIST from STMT's histogram list. */
186
187 void
188 gimple_remove_histogram_value (struct function *fun, gimple *stmt,
189 histogram_value hist)
190 {
191 histogram_value hist2 = gimple_histogram_value (fun, stmt);
192 if (hist == hist2)
193 {
194 set_histogram_value (fun, stmt, hist->hvalue.next);
195 }
196 else
197 {
198 while (hist2->hvalue.next != hist)
199 hist2 = hist2->hvalue.next;
200 hist2->hvalue.next = hist->hvalue.next;
201 }
202 free (hist->hvalue.counters);
203 if (flag_checking)
204 memset (hist, 0xab, sizeof (*hist));
205 free (hist);
206 }
207
208 /* Look up the histogram of type TYPE for STMT. */
209
210 histogram_value
211 gimple_histogram_value_of_type (struct function *fun, gimple *stmt,
212 enum hist_type type)
213 {
214 histogram_value hist;
215 for (hist = gimple_histogram_value (fun, stmt); hist;
216 hist = hist->hvalue.next)
217 if (hist->type == type)
218 return hist;
219 return NULL;
220 }
221
222 /* Dump information about HIST to DUMP_FILE. */
223
224 static void
225 dump_histogram_value (FILE *dump_file, histogram_value hist)
226 {
227 switch (hist->type)
228 {
229 case HIST_TYPE_INTERVAL:
230 if (hist->hvalue.counters)
231 {
232 fprintf (dump_file, "Interval counter range [%d,%d]: [",
233 hist->hdata.intvl.int_start,
234 (hist->hdata.intvl.int_start
235 + hist->hdata.intvl.steps - 1));
236
237 unsigned int i;
238 for (i = 0; i < hist->hdata.intvl.steps; i++)
239 {
240 fprintf (dump_file, "%d:%" PRId64,
241 hist->hdata.intvl.int_start + i,
242 (int64_t) hist->hvalue.counters[i]);
243 if (i != hist->hdata.intvl.steps - 1)
244 fprintf (dump_file, ", ");
245 }
246 fprintf (dump_file, "] outside range: %" PRId64 ".\n",
247 (int64_t) hist->hvalue.counters[i]);
248 }
249 break;
250
251 case HIST_TYPE_POW2:
252 if (hist->hvalue.counters)
253 fprintf (dump_file, "Pow2 counter pow2:%" PRId64
254 " nonpow2:%" PRId64 ".\n",
255 (int64_t) hist->hvalue.counters[1],
256 (int64_t) hist->hvalue.counters[0]);
257 break;
258
259 case HIST_TYPE_TOPN_VALUES:
260 case HIST_TYPE_INDIR_CALL:
261 if (hist->hvalue.counters)
262 {
263 fprintf (dump_file,
264 (hist->type == HIST_TYPE_TOPN_VALUES
265 ? "Top N value counter" : "Indirect call counter"));
266 if (hist->hvalue.counters)
267 {
268 unsigned count = hist->hvalue.counters[1];
269 fprintf (dump_file, " all: %" PRId64 ", %" PRId64 " values: ",
270 (int64_t) hist->hvalue.counters[0], (int64_t) count);
271 for (unsigned i = 0; i < count; i++)
272 {
273 fprintf (dump_file, "[%" PRId64 ":%" PRId64 "]",
274 (int64_t) hist->hvalue.counters[2 * i + 2],
275 (int64_t) hist->hvalue.counters[2 * i + 3]);
276 if (i != count - 1)
277 fprintf (dump_file, ", ");
278 }
279 fprintf (dump_file, ".\n");
280 }
281 }
282 break;
283
284 case HIST_TYPE_AVERAGE:
285 if (hist->hvalue.counters)
286 fprintf (dump_file, "Average value sum:%" PRId64
287 " times:%" PRId64 ".\n",
288 (int64_t) hist->hvalue.counters[0],
289 (int64_t) hist->hvalue.counters[1]);
290 break;
291
292 case HIST_TYPE_IOR:
293 if (hist->hvalue.counters)
294 fprintf (dump_file, "IOR value ior:%" PRId64 ".\n",
295 (int64_t) hist->hvalue.counters[0]);
296 break;
297
298 case HIST_TYPE_TIME_PROFILE:
299 if (hist->hvalue.counters)
300 fprintf (dump_file, "Time profile time:%" PRId64 ".\n",
301 (int64_t) hist->hvalue.counters[0]);
302 break;
303 default:
304 gcc_unreachable ();
305 }
306 }
307
308 /* Stream out HIST, and any histograms chained after it, to OB. */
309
310 void
311 stream_out_histogram_value (struct output_block *ob, histogram_value hist)
312 {
313 struct bitpack_d bp;
314 unsigned int i;
315
316 bp = bitpack_create (ob->main_stream);
317 bp_pack_enum (&bp, hist_type, HIST_TYPE_MAX, hist->type);
318 bp_pack_value (&bp, hist->hvalue.next != NULL, 1);
319 streamer_write_bitpack (&bp);
320 switch (hist->type)
321 {
322 case HIST_TYPE_INTERVAL:
323 streamer_write_hwi (ob, hist->hdata.intvl.int_start);
324 streamer_write_uhwi (ob, hist->hdata.intvl.steps);
325 break;
326 default:
327 break;
328 }
329 for (i = 0; i < hist->n_counters; i++)
330 {
331 /* When the user uses an unsigned type with a big value, the constant
332 converted to gcov_type (a signed type) can be negative. */
333 gcov_type value = hist->hvalue.counters[i];
334 if (hist->type == HIST_TYPE_TOPN_VALUES)
335 ;
336 else
337 gcc_assert (value >= 0);
338
339 streamer_write_gcov_count (ob, value);
340 }
341 if (hist->hvalue.next)
342 stream_out_histogram_value (ob, hist->hvalue.next);
343 }
344
345 /* Stream in histograms from IB and attach them to STMT. */
346
347 void
348 stream_in_histogram_value (class lto_input_block *ib, gimple *stmt)
349 {
350 enum hist_type type;
351 unsigned int ncounters = 0;
352 struct bitpack_d bp;
353 unsigned int i;
354 histogram_value new_val;
355 bool next;
356 histogram_value *next_p = NULL;
357
358 do
359 {
360 bp = streamer_read_bitpack (ib);
361 type = bp_unpack_enum (&bp, hist_type, HIST_TYPE_MAX);
362 next = bp_unpack_value (&bp, 1);
363 new_val = gimple_alloc_histogram_value (cfun, type, stmt);
364 switch (type)
365 {
366 case HIST_TYPE_INTERVAL:
367 new_val->hdata.intvl.int_start = streamer_read_hwi (ib);
368 new_val->hdata.intvl.steps = streamer_read_uhwi (ib);
369 ncounters = new_val->hdata.intvl.steps + 2;
370 break;
371
372 case HIST_TYPE_POW2:
373 case HIST_TYPE_AVERAGE:
374 ncounters = 2;
375 break;
376
377 case HIST_TYPE_TOPN_VALUES:
378 case HIST_TYPE_INDIR_CALL:
379 break;
380
381 case HIST_TYPE_IOR:
382 case HIST_TYPE_TIME_PROFILE:
383 ncounters = 1;
384 break;
385
386 default:
387 gcc_unreachable ();
388 }
389
390 /* TOP N histograms have a variable number of counters. */
391 if (type == HIST_TYPE_INDIR_CALL || type == HIST_TYPE_TOPN_VALUES)
392 {
393 gcov_type total = streamer_read_gcov_count (ib);
394 gcov_type ncounters = streamer_read_gcov_count (ib);
395 new_val->hvalue.counters = XNEWVAR (gcov_type,
396 sizeof (*new_val->hvalue.counters)
397 * (2 + 2 * ncounters));
398 new_val->hvalue.counters[0] = total;
399 new_val->hvalue.counters[1] = ncounters;
400 new_val->n_counters = 2 + 2 * ncounters;
401 for (i = 0; i < 2 * ncounters; i++)
402 new_val->hvalue.counters[2 + i] = streamer_read_gcov_count (ib);
403 }
404 else
405 {
406 new_val->hvalue.counters = XNEWVAR (gcov_type,
407 sizeof (*new_val->hvalue.counters)
408 * ncounters);
409 new_val->n_counters = ncounters;
410 for (i = 0; i < ncounters; i++)
411 new_val->hvalue.counters[i] = streamer_read_gcov_count (ib);
412 }
413
414 if (!next_p)
415 gimple_add_histogram_value (cfun, stmt, new_val);
416 else
417 *next_p = new_val;
418 next_p = &new_val->hvalue.next;
419 }
420 while (next);
421 }
422
423 /* Dump all histograms attached to STMT to DUMP_FILE. */
424
425 void
426 dump_histograms_for_stmt (struct function *fun, FILE *dump_file, gimple *stmt)
427 {
428 histogram_value hist;
429 for (hist = gimple_histogram_value (fun, stmt); hist; hist = hist->hvalue.next)
430 dump_histogram_value (dump_file, hist);
431 }
432
433 /* Remove all histograms associated with STMT. */
434
435 void
436 gimple_remove_stmt_histograms (struct function *fun, gimple *stmt)
437 {
438 histogram_value val;
439 while ((val = gimple_histogram_value (fun, stmt)) != NULL)
440 gimple_remove_histogram_value (fun, stmt, val);
441 }
442
443 /* Duplicate all histograms associated with OSTMT to STMT. */
444
445 void
446 gimple_duplicate_stmt_histograms (struct function *fun, gimple *stmt,
447 struct function *ofun, gimple *ostmt)
448 {
449 histogram_value val;
450 for (val = gimple_histogram_value (ofun, ostmt); val != NULL; val = val->hvalue.next)
451 {
452 histogram_value new_val = gimple_alloc_histogram_value (fun, val->type);
453 memcpy (new_val, val, sizeof (*val));
454 new_val->hvalue.stmt = stmt;
455 new_val->hvalue.counters = XNEWVAR (gcov_type, sizeof (*new_val->hvalue.counters) * new_val->n_counters);
456 memcpy (new_val->hvalue.counters, val->hvalue.counters, sizeof (*new_val->hvalue.counters) * new_val->n_counters);
457 gimple_add_histogram_value (fun, stmt, new_val);
458 }
459 }
460
461 /* Move all histograms associated with OSTMT to STMT. */
462
463 void
464 gimple_move_stmt_histograms (struct function *fun, gimple *stmt, gimple *ostmt)
465 {
466 histogram_value val = gimple_histogram_value (fun, ostmt);
467 if (val)
468 {
469 /* The following three statements can't be reordered,
470 because histogram hashtab relies on stmt field value
471 for finding the exact slot. */
472 set_histogram_value (fun, ostmt, NULL);
473 for (; val != NULL; val = val->hvalue.next)
474 val->hvalue.stmt = stmt;
475 set_histogram_value (fun, stmt, val);
476 }
477 }
478
479 static bool error_found = false;
480
481 /* Helper function for verify_histograms. For each histogram reachable via htab
482 walk verify that it was reached via statement walk. */
483
484 static int
485 visit_hist (void **slot, void *data)
486 {
487 hash_set<histogram_value> *visited = (hash_set<histogram_value> *) data;
488 histogram_value hist = *(histogram_value *) slot;
489
490 if (!visited->contains (hist)
491 && hist->type != HIST_TYPE_TIME_PROFILE)
492 {
493 error ("dead histogram");
494 dump_histogram_value (stderr, hist);
495 debug_gimple_stmt (hist->hvalue.stmt);
496 error_found = true;
497 }
498 return 1;
499 }
500
501 /* Verify sanity of the histograms. */
502
503 DEBUG_FUNCTION void
504 verify_histograms (void)
505 {
506 basic_block bb;
507 gimple_stmt_iterator gsi;
508 histogram_value hist;
509
510 error_found = false;
511 hash_set<histogram_value> visited_hists;
512 FOR_EACH_BB_FN (bb, cfun)
513 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
514 {
515 gimple *stmt = gsi_stmt (gsi);
516
517 for (hist = gimple_histogram_value (cfun, stmt); hist;
518 hist = hist->hvalue.next)
519 {
520 if (hist->hvalue.stmt != stmt)
521 {
522 error ("histogram value statement does not correspond to "
523 "the statement it is associated with");
524 debug_gimple_stmt (stmt);
525 dump_histogram_value (stderr, hist);
526 error_found = true;
527 }
528 visited_hists.add (hist);
529 }
530 }
531 if (VALUE_HISTOGRAMS (cfun))
532 htab_traverse (VALUE_HISTOGRAMS (cfun), visit_hist, &visited_hists);
533 if (error_found)
534 internal_error ("%qs failed", __func__);
535 }
536
537 /* Helper function for free_histograms.  Free the counters and the
538 histogram itself for each histogram reachable via htab walk. */
539
540 static int
541 free_hist (void **slot, void *data ATTRIBUTE_UNUSED)
542 {
543 histogram_value hist = *(histogram_value *) slot;
544 free (hist->hvalue.counters);
545 free (hist);
546 return 1;
547 }
548
549 void
550 free_histograms (struct function *fn)
551 {
552 if (VALUE_HISTOGRAMS (fn))
553 {
554 htab_traverse (VALUE_HISTOGRAMS (fn), free_hist, NULL);
555 htab_delete (VALUE_HISTOGRAMS (fn));
556 VALUE_HISTOGRAMS (fn) = NULL;
557 }
558 }
559
560 /* The overall number of invocations of the counter should match the
561 execution count of the basic block.  Report a mismatch as an error
562 rather than an internal error, as it might mean that the user has
563 misused the profile somehow. */
564
565 static bool
566 check_counter (gimple *stmt, const char * name,
567 gcov_type *count, gcov_type *all, profile_count bb_count_d)
568 {
569 gcov_type bb_count = bb_count_d.ipa ().to_gcov_type ();
570 if (*all != bb_count || *count > *all)
571 {
572 dump_user_location_t locus;
573 locus = ((stmt != NULL)
574 ? dump_user_location_t (stmt)
575 : dump_user_location_t::from_function_decl
576 (current_function_decl));
577 if (flag_profile_correction)
578 {
579 if (dump_enabled_p ())
580 dump_printf_loc (MSG_MISSED_OPTIMIZATION, locus,
581 "correcting inconsistent value profile: %s "
582 "profiler overall count (%d) does not match BB "
583 "count (%d)\n", name, (int)*all, (int)bb_count);
584 *all = bb_count;
585 if (*count > *all)
586 *count = *all;
587 return false;
588 }
589 else
590 {
591 error_at (locus.get_location_t (), "corrupted value profile: %s "
592 "profile counter (%d out of %d) inconsistent with "
593 "basic-block count (%d)",
594 name,
595 (int) *count,
596 (int) *all,
597 (int) bb_count);
598 return true;
599 }
600 }
601
602 return false;
603 }
604
605 /* GIMPLE based transformations. */
606
607 bool
608 gimple_value_profile_transformations (void)
609 {
610 basic_block bb;
611 gimple_stmt_iterator gsi;
612 bool changed = false;
613
614 FOR_EACH_BB_FN (bb, cfun)
615 {
616 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
617 {
618 gimple *stmt = gsi_stmt (gsi);
619 histogram_value th = gimple_histogram_value (cfun, stmt);
620 if (!th)
621 continue;
622
623 if (dump_file)
624 {
625 fprintf (dump_file, "Trying transformations on stmt ");
626 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
627 dump_histograms_for_stmt (cfun, dump_file, stmt);
628 }
629
630 /* Transformations: */
631 /* The order of things in this conditional controls which
632 transformation is used when more than one is applicable. */
633 /* It is expected that any code added by the transformations
634 will be added before the current statement, and that the
635 current statement remain valid (although possibly
636 modified) upon return. */
637 if (gimple_mod_subtract_transform (&gsi)
638 || gimple_divmod_fixed_value_transform (&gsi)
639 || gimple_mod_pow2_value_transform (&gsi)
640 || gimple_stringops_transform (&gsi))
641 {
642 stmt = gsi_stmt (gsi);
643 changed = true;
644 /* Original statement may no longer be in the same block. */
645 if (bb != gimple_bb (stmt))
646 {
647 bb = gimple_bb (stmt);
648 gsi = gsi_for_stmt (stmt);
649 }
650 }
651
652 /* dump_ic_profile never transforms a GIMPLE statement; it only dumps. */
653 if (dump_enabled_p ())
654 dump_ic_profile (&gsi);
655 }
656 }
657
658 return changed;
659 }
660
661 /* Generate code for transformation 1 (with parent gimple assignment
662 STMT and probability of taking the optimal path PROB, which is
663 equivalent to COUNT/ALL within roundoff error). This generates the
664 result into a temp and returns the temp; it does not replace or
665 alter the original STMT. */
666
667 static tree
668 gimple_divmod_fixed_value (gassign *stmt, tree value, profile_probability prob,
669 gcov_type count, gcov_type all)
670 {
671 gassign *stmt1, *stmt2;
672 gcond *stmt3;
673 tree tmp0, tmp1, tmp2;
674 gimple *bb1end, *bb2end, *bb3end;
675 basic_block bb, bb2, bb3, bb4;
676 tree optype, op1, op2;
677 edge e12, e13, e23, e24, e34;
678 gimple_stmt_iterator gsi;
679
680 gcc_assert (is_gimple_assign (stmt)
681 && (gimple_assign_rhs_code (stmt) == TRUNC_DIV_EXPR
682 || gimple_assign_rhs_code (stmt) == TRUNC_MOD_EXPR));
683
684 optype = TREE_TYPE (gimple_assign_lhs (stmt));
685 op1 = gimple_assign_rhs1 (stmt);
686 op2 = gimple_assign_rhs2 (stmt);
687
688 bb = gimple_bb (stmt);
689 gsi = gsi_for_stmt (stmt);
690
691 tmp0 = make_temp_ssa_name (optype, NULL, "PROF");
692 tmp1 = make_temp_ssa_name (optype, NULL, "PROF");
693 stmt1 = gimple_build_assign (tmp0, fold_convert (optype, value));
694 stmt2 = gimple_build_assign (tmp1, op2);
695 stmt3 = gimple_build_cond (NE_EXPR, tmp1, tmp0, NULL_TREE, NULL_TREE);
696 gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
697 gsi_insert_before (&gsi, stmt2, GSI_SAME_STMT);
698 gsi_insert_before (&gsi, stmt3, GSI_SAME_STMT);
699 bb1end = stmt3;
700
701 tmp2 = create_tmp_reg (optype, "PROF");
702 stmt1 = gimple_build_assign (tmp2, gimple_assign_rhs_code (stmt), op1, tmp0);
703 gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
704 bb2end = stmt1;
705
706 stmt1 = gimple_build_assign (tmp2, gimple_assign_rhs_code (stmt), op1, op2);
707 gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
708 bb3end = stmt1;
709
710 /* Fix CFG. */
711 /* Edge e23 connects bb2 to bb3, etc. */
712 e12 = split_block (bb, bb1end);
713 bb2 = e12->dest;
714 bb2->count = profile_count::from_gcov_type (count);
715 e23 = split_block (bb2, bb2end);
716 bb3 = e23->dest;
717 bb3->count = profile_count::from_gcov_type (all - count);
718 e34 = split_block (bb3, bb3end);
719 bb4 = e34->dest;
720 bb4->count = profile_count::from_gcov_type (all);
721
722 e12->flags &= ~EDGE_FALLTHRU;
723 e12->flags |= EDGE_FALSE_VALUE;
724 e12->probability = prob;
725
726 e13 = make_edge (bb, bb3, EDGE_TRUE_VALUE);
727 e13->probability = prob.invert ();
728
729 remove_edge (e23);
730
731 e24 = make_edge (bb2, bb4, EDGE_FALLTHRU);
732 e24->probability = profile_probability::always ();
733
734 e34->probability = profile_probability::always ();
735
736 return tmp2;
737 }
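/* Roughly, the code built above has the following shape (a sketch only;
   labels and temporary names are illustrative, the function emits GIMPLE
   and splits basic blocks explicitly):

       PROF_0 = (optype) value;
       PROF_1 = op2;
       if (PROF_1 != PROF_0) goto fallback;
     specialized:
       PROF_2 = op1 OP PROF_0;    OP is TRUNC_DIV_EXPR or TRUNC_MOD_EXPR
       goto join;
     fallback:
       PROF_2 = op1 OP op2;
     join:
       the caller substitutes PROF_2 for the RHS of the original STMT.  */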
738
739 /* Return the N-th value count of a TOPN_VALUE histogram HIST.  If
740 there is such a value, return true and set the VALUE, COUNT
741 and ALL arguments.
742
743 Counters have the following meaning:
744
745 abs (counters[0]) is the number of executions,
746 counters[1] is the number of tracked values,
747 for i in 0 ... counters[1] - 1
748 counters[2 * i + 2] is the i-th tracked value and
749 counters[2 * i + 3] is its hit count.
750
751 counters[0] is negative when the counter became full during merging and some values were lost. */
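/* A worked example of the layout above (purely illustrative numbers,
   ignoring the check_counter adjustment below): with counters
   { 100, 2, 7, 60, 13, 30 } there were 100 executions and two tracked
   values; N == 0 yields VALUE 7 with COUNT 60 and ALL 100, while
   N == 1 yields VALUE 13 with COUNT 30.  */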
752
753 bool
754 get_nth_most_common_value (gimple *stmt, const char *counter_type,
755 histogram_value hist, gcov_type *value,
756 gcov_type *count, gcov_type *all, unsigned n)
757 {
758 unsigned counters = hist->hvalue.counters[1];
759 if (n >= counters)
760 return false;
761
762 *count = 0;
763 *value = 0;
764
765 gcov_type read_all = abs_hwi (hist->hvalue.counters[0]);
766
767 gcov_type v = hist->hvalue.counters[2 * n + 2];
768 gcov_type c = hist->hvalue.counters[2 * n + 3];
769
770 if (hist->hvalue.counters[0] < 0
771 && (flag_profile_reproducible == PROFILE_REPRODUCIBILITY_PARALLEL_RUNS
772 || (flag_profile_reproducible
773 == PROFILE_REPRODUCIBILITY_MULTITHREADED)))
774 return false;
775
776 /* Indirect calls cannot be verified; for them STMT is NULL. */
777 if (stmt
778 && check_counter (stmt, counter_type, &c, &read_all,
779 gimple_bb (stmt)->count))
780 return false;
781
782 *all = read_all;
783
784 *value = v;
785 *count = c;
786 return true;
787 }
788
789 /* Do transform 1) on the statement pointed-to by SI if applicable. */
790
791 static bool
792 gimple_divmod_fixed_value_transform (gimple_stmt_iterator *si)
793 {
794 histogram_value histogram;
795 enum tree_code code;
796 gcov_type val, count, all;
797 tree result, value, tree_val;
798 profile_probability prob;
799 gassign *stmt;
800
801 stmt = dyn_cast <gassign *> (gsi_stmt (*si));
802 if (!stmt)
803 return false;
804
805 if (!INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (stmt))))
806 return false;
807
808 code = gimple_assign_rhs_code (stmt);
809
810 if (code != TRUNC_DIV_EXPR && code != TRUNC_MOD_EXPR)
811 return false;
812
813 histogram = gimple_histogram_value_of_type (cfun, stmt,
814 HIST_TYPE_TOPN_VALUES);
815 if (!histogram)
816 return false;
817
818 if (!get_nth_most_common_value (stmt, "divmod", histogram, &val, &count,
819 &all))
820 return false;
821
822 value = histogram->hvalue.value;
823 gimple_remove_histogram_value (cfun, stmt, histogram);
824
825 /* We require that count is at least half of all. */
826 if (simple_cst_equal (gimple_assign_rhs2 (stmt), value) != 1
827 || 2 * count < all
828 || optimize_bb_for_size_p (gimple_bb (stmt)))
829 return false;
830
831 /* Compute probability of taking the optimal path. */
832 if (all > 0)
833 prob = profile_probability::probability_in_gcov_type (count, all);
834 else
835 prob = profile_probability::never ();
836
837 if (sizeof (gcov_type) == sizeof (HOST_WIDE_INT))
838 tree_val = build_int_cst (get_gcov_type (), val);
839 else
840 {
841 HOST_WIDE_INT a[2];
842 a[0] = (unsigned HOST_WIDE_INT) val;
843 a[1] = val >> (HOST_BITS_PER_WIDE_INT - 1) >> 1;
844
845 tree_val = wide_int_to_tree (get_gcov_type (), wide_int::from_array (a, 2,
846 TYPE_PRECISION (get_gcov_type ()), false));
847 }
848 result = gimple_divmod_fixed_value (stmt, tree_val, prob, count, all);
849
850 if (dump_enabled_p ())
851 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
852 "Transformation done: div/mod by constant %T\n", tree_val);
853
854 gimple_assign_set_rhs_from_tree (si, result);
855 update_stmt (gsi_stmt (*si));
856
857 return true;
858 }
859
860 /* Generate code for transformation 2 (with parent gimple assign STMT and
861 probability of taking the optimal path PROB, which is equivalent to COUNT/ALL
862 within roundoff error). This generates the result into a temp and returns
863 the temp; it does not replace or alter the original STMT. */
864
865 static tree
866 gimple_mod_pow2 (gassign *stmt, profile_probability prob, gcov_type count, gcov_type all)
867 {
868 gassign *stmt1, *stmt2, *stmt3;
869 gcond *stmt4;
870 tree tmp2, tmp3;
871 gimple *bb1end, *bb2end, *bb3end;
872 basic_block bb, bb2, bb3, bb4;
873 tree optype, op1, op2;
874 edge e12, e13, e23, e24, e34;
875 gimple_stmt_iterator gsi;
876 tree result;
877
878 gcc_assert (is_gimple_assign (stmt)
879 && gimple_assign_rhs_code (stmt) == TRUNC_MOD_EXPR);
880
881 optype = TREE_TYPE (gimple_assign_lhs (stmt));
882 op1 = gimple_assign_rhs1 (stmt);
883 op2 = gimple_assign_rhs2 (stmt);
884
885 bb = gimple_bb (stmt);
886 gsi = gsi_for_stmt (stmt);
887
888 result = create_tmp_reg (optype, "PROF");
889 tmp2 = make_temp_ssa_name (optype, NULL, "PROF");
890 tmp3 = make_temp_ssa_name (optype, NULL, "PROF");
891 stmt2 = gimple_build_assign (tmp2, PLUS_EXPR, op2,
892 build_int_cst (optype, -1));
893 stmt3 = gimple_build_assign (tmp3, BIT_AND_EXPR, tmp2, op2);
894 stmt4 = gimple_build_cond (NE_EXPR, tmp3, build_int_cst (optype, 0),
895 NULL_TREE, NULL_TREE);
896 gsi_insert_before (&gsi, stmt2, GSI_SAME_STMT);
897 gsi_insert_before (&gsi, stmt3, GSI_SAME_STMT);
898 gsi_insert_before (&gsi, stmt4, GSI_SAME_STMT);
899 bb1end = stmt4;
900
901 /* tmp2 == op2-1 inherited from previous block. */
902 stmt1 = gimple_build_assign (result, BIT_AND_EXPR, op1, tmp2);
903 gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
904 bb2end = stmt1;
905
906 stmt1 = gimple_build_assign (result, gimple_assign_rhs_code (stmt),
907 op1, op2);
908 gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
909 bb3end = stmt1;
910
911 /* Fix CFG. */
912 /* Edge e23 connects bb2 to bb3, etc. */
913 e12 = split_block (bb, bb1end);
914 bb2 = e12->dest;
915 bb2->count = profile_count::from_gcov_type (count);
916 e23 = split_block (bb2, bb2end);
917 bb3 = e23->dest;
918 bb3->count = profile_count::from_gcov_type (all - count);
919 e34 = split_block (bb3, bb3end);
920 bb4 = e34->dest;
921 bb4->count = profile_count::from_gcov_type (all);
922
923 e12->flags &= ~EDGE_FALLTHRU;
924 e12->flags |= EDGE_FALSE_VALUE;
925 e12->probability = prob;
926
927 e13 = make_edge (bb, bb3, EDGE_TRUE_VALUE);
928 e13->probability = prob.invert ();
929
930 remove_edge (e23);
931
932 e24 = make_edge (bb2, bb4, EDGE_FALLTHRU);
933 e24->probability = profile_probability::always ();
934
935 e34->probability = profile_probability::always ();
936
937 return result;
938 }
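/* Shape of the code built above (a sketch only; labels and temporary
   names are illustrative):

       PROF_2 = op2 - 1;
       PROF_3 = PROF_2 & op2;
       if (PROF_3 != 0) goto fallback;
     pow2:
       result = op1 & PROF_2;     op2 is a power of two, so mod is a mask
       goto join;
     fallback:
       result = op1 % op2;
     join:
       result is substituted for the RHS of the original STMT.  */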
939
940 /* Do transform 2) on the statement pointed-to by SI if applicable. */
941
942 static bool
943 gimple_mod_pow2_value_transform (gimple_stmt_iterator *si)
944 {
945 histogram_value histogram;
946 enum tree_code code;
947 gcov_type count, wrong_values, all;
948 tree lhs_type, result, value;
949 profile_probability prob;
950 gassign *stmt;
951
952 stmt = dyn_cast <gassign *> (gsi_stmt (*si));
953 if (!stmt)
954 return false;
955
956 lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
957 if (!INTEGRAL_TYPE_P (lhs_type))
958 return false;
959
960 code = gimple_assign_rhs_code (stmt);
961
962 if (code != TRUNC_MOD_EXPR || !TYPE_UNSIGNED (lhs_type))
963 return false;
964
965 histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_POW2);
966 if (!histogram)
967 return false;
968
969 value = histogram->hvalue.value;
970 wrong_values = histogram->hvalue.counters[0];
971 count = histogram->hvalue.counters[1];
972
973 gimple_remove_histogram_value (cfun, stmt, histogram);
974
975 /* We require that we hit a power of 2 in at least half of all evaluations. */
976 if (simple_cst_equal (gimple_assign_rhs2 (stmt), value) != 1
977 || count < wrong_values
978 || optimize_bb_for_size_p (gimple_bb (stmt)))
979 return false;
980
981 /* Compute probability of taking the optimal path. */
982 all = count + wrong_values;
983
984 if (check_counter (stmt, "pow2", &count, &all, gimple_bb (stmt)->count))
985 return false;
986
987 if (dump_enabled_p ())
988 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
989 "Transformation done: mod power of 2\n");
990
991 if (all > 0)
992 prob = profile_probability::probability_in_gcov_type (count, all);
993 else
994 prob = profile_probability::never ();
995
996 result = gimple_mod_pow2 (stmt, prob, count, all);
997
998 gimple_assign_set_rhs_from_tree (si, result);
999 update_stmt (gsi_stmt (*si));
1000
1001 return true;
1002 }
1003
1004 /* Generate code for transformations 3 and 4 (with parent gimple assign STMT
1005 and NCOUNTS the number of cases to support; currently only NCOUNTS == 0 or 1
1006 is supported and this is built into this interface.  The probabilities of
1007 taking the optimal paths are PROB1 and PROB2, which are equivalent to
1008 COUNT1/ALL and COUNT2/ALL respectively within roundoff error).  This
1009 generates the result into a temp and returns the temp; it does not replace
1010 or alter the original STMT. */
1011 /* FIXME: Generalize the interface to handle NCOUNTS > 1. */
1012
1013 static tree
1014 gimple_mod_subtract (gassign *stmt, profile_probability prob1,
1015 profile_probability prob2, int ncounts,
1016 gcov_type count1, gcov_type count2, gcov_type all)
1017 {
1018 gassign *stmt1;
1019 gimple *stmt2;
1020 gcond *stmt3;
1021 tree tmp1;
1022 gimple *bb1end, *bb2end = NULL, *bb3end;
1023 basic_block bb, bb2, bb3, bb4;
1024 tree optype, op1, op2;
1025 edge e12, e23 = 0, e24, e34, e14;
1026 gimple_stmt_iterator gsi;
1027 tree result;
1028
1029 gcc_assert (is_gimple_assign (stmt)
1030 && gimple_assign_rhs_code (stmt) == TRUNC_MOD_EXPR);
1031
1032 optype = TREE_TYPE (gimple_assign_lhs (stmt));
1033 op1 = gimple_assign_rhs1 (stmt);
1034 op2 = gimple_assign_rhs2 (stmt);
1035
1036 bb = gimple_bb (stmt);
1037 gsi = gsi_for_stmt (stmt);
1038
1039 result = create_tmp_reg (optype, "PROF");
1040 tmp1 = make_temp_ssa_name (optype, NULL, "PROF");
1041 stmt1 = gimple_build_assign (result, op1);
1042 stmt2 = gimple_build_assign (tmp1, op2);
1043 stmt3 = gimple_build_cond (LT_EXPR, result, tmp1, NULL_TREE, NULL_TREE);
1044 gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
1045 gsi_insert_before (&gsi, stmt2, GSI_SAME_STMT);
1046 gsi_insert_before (&gsi, stmt3, GSI_SAME_STMT);
1047 bb1end = stmt3;
1048
1049 if (ncounts) /* Assumed to be 0 or 1 */
1050 {
1051 stmt1 = gimple_build_assign (result, MINUS_EXPR, result, tmp1);
1052 stmt2 = gimple_build_cond (LT_EXPR, result, tmp1, NULL_TREE, NULL_TREE);
1053 gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
1054 gsi_insert_before (&gsi, stmt2, GSI_SAME_STMT);
1055 bb2end = stmt2;
1056 }
1057
1058 /* Fallback case. */
1059 stmt1 = gimple_build_assign (result, gimple_assign_rhs_code (stmt),
1060 result, tmp1);
1061 gsi_insert_before (&gsi, stmt1, GSI_SAME_STMT);
1062 bb3end = stmt1;
1063
1064 /* Fix CFG. */
1065 /* Edge e23 connects bb2 to bb3, etc. */
1066 /* However block 3 is optional; if it is not there, references
1067 to 3 really refer to block 2. */
1068 e12 = split_block (bb, bb1end);
1069 bb2 = e12->dest;
1070 bb2->count = profile_count::from_gcov_type (all - count1);
1071
1072 if (ncounts) /* Assumed to be 0 or 1. */
1073 {
1074 e23 = split_block (bb2, bb2end);
1075 bb3 = e23->dest;
1076 bb3->count = profile_count::from_gcov_type (all - count1 - count2);
1077 }
1078
1079 e34 = split_block (ncounts ? bb3 : bb2, bb3end);
1080 bb4 = e34->dest;
1081 bb4->count = profile_count::from_gcov_type (all);
1082
1083 e12->flags &= ~EDGE_FALLTHRU;
1084 e12->flags |= EDGE_FALSE_VALUE;
1085 e12->probability = prob1.invert ();
1086
1087 e14 = make_edge (bb, bb4, EDGE_TRUE_VALUE);
1088 e14->probability = prob1;
1089
1090 if (ncounts) /* Assumed to be 0 or 1. */
1091 {
1092 e23->flags &= ~EDGE_FALLTHRU;
1093 e23->flags |= EDGE_FALSE_VALUE;
1094 e23->probability = prob2.invert ();
1095
1096 e24 = make_edge (bb2, bb4, EDGE_TRUE_VALUE);
1097 e24->probability = prob2;
1098 }
1099
1100 e34->probability = profile_probability::always ();
1101
1102 return result;
1103 }
1104
1105 /* Do transforms 3) and 4) on the statement pointed-to by SI if applicable. */
1106
1107 static bool
1108 gimple_mod_subtract_transform (gimple_stmt_iterator *si)
1109 {
1110 histogram_value histogram;
1111 enum tree_code code;
1112 gcov_type count, wrong_values, all;
1113 tree lhs_type, result;
1114 profile_probability prob1, prob2;
1115 unsigned int i, steps;
1116 gcov_type count1, count2;
1117 gassign *stmt;
1118 stmt = dyn_cast <gassign *> (gsi_stmt (*si));
1119 if (!stmt)
1120 return false;
1121
1122 lhs_type = TREE_TYPE (gimple_assign_lhs (stmt));
1123 if (!INTEGRAL_TYPE_P (lhs_type))
1124 return false;
1125
1126 code = gimple_assign_rhs_code (stmt);
1127
1128 if (code != TRUNC_MOD_EXPR || !TYPE_UNSIGNED (lhs_type))
1129 return false;
1130
1131 histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_INTERVAL);
1132 if (!histogram)
1133 return false;
1134
1135 all = 0;
1136 wrong_values = 0;
1137 for (i = 0; i < histogram->hdata.intvl.steps; i++)
1138 all += histogram->hvalue.counters[i];
1139
1140 wrong_values += histogram->hvalue.counters[i];
1141 wrong_values += histogram->hvalue.counters[i+1];
1142 steps = histogram->hdata.intvl.steps;
1143 all += wrong_values;
1144 count1 = histogram->hvalue.counters[0];
1145 count2 = histogram->hvalue.counters[1];
1146
1147 if (check_counter (stmt, "interval", &count1, &all, gimple_bb (stmt)->count))
1148 {
1149 gimple_remove_histogram_value (cfun, stmt, histogram);
1150 return false;
1151 }
1152
1153 if (flag_profile_correction && count1 + count2 > all)
1154 all = count1 + count2;
1155
1156 gcc_assert (count1 + count2 <= all);
1157
1158 /* We require that we use just subtractions in at least 50% of all
1159 evaluations. */
1160 count = 0;
1161 for (i = 0; i < histogram->hdata.intvl.steps; i++)
1162 {
1163 count += histogram->hvalue.counters[i];
1164 if (count * 2 >= all)
1165 break;
1166 }
1167 if (i == steps
1168 || optimize_bb_for_size_p (gimple_bb (stmt)))
1169 return false;
1170
1171 gimple_remove_histogram_value (cfun, stmt, histogram);
1172 if (dump_enabled_p ())
1173 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
1174 "Transformation done: mod subtract\n");
1175
1176 /* Compute probability of taking the optimal path(s). */
1177 if (all > 0)
1178 {
1179 prob1 = profile_probability::probability_in_gcov_type (count1, all);
1180 prob2 = profile_probability::probability_in_gcov_type (count2, all);
1181 }
1182 else
1183 {
1184 prob1 = prob2 = profile_probability::never ();
1185 }
1186
1187 /* In practice, "steps" is always 2. This interface reflects this,
1188 and will need to be changed if "steps" can change. */
1189 result = gimple_mod_subtract (stmt, prob1, prob2, i, count1, count2, all);
1190
1191 gimple_assign_set_rhs_from_tree (si, result);
1192 update_stmt (gsi_stmt (*si));
1193
1194 return true;
1195 }
1196
1197 typedef int_hash <unsigned int, 0, UINT_MAX> profile_id_hash;
1198
1199 static hash_map<profile_id_hash, cgraph_node *> *cgraph_node_map = 0;
1200
1201 /* Return true if the node map is initialized.  This
1202 is used to test whether profile_id has been created
1203 for cgraph_nodes. */
1204
1205 bool
1206 coverage_node_map_initialized_p (void)
1207 {
1208 return cgraph_node_map != 0;
1209 }
1210
1211 /* Initialize the map from PROFILE_ID to CGRAPH_NODE.
1212 When LOCAL is true, the PROFILE_IDs are computed; when it is false, we assume
1213 that the PROFILE_IDs were already assigned. */
1214
1215 void
1216 init_node_map (bool local)
1217 {
1218 struct cgraph_node *n;
1219 cgraph_node_map = new hash_map<profile_id_hash, cgraph_node *>;
1220
1221 FOR_EACH_DEFINED_FUNCTION (n)
1222 if (n->has_gimple_body_p () || n->thunk.thunk_p)
1223 {
1224 cgraph_node **val;
1225 dump_user_location_t loc
1226 = dump_user_location_t::from_function_decl (n->decl);
1227 if (local)
1228 {
1229 n->profile_id = coverage_compute_profile_id (n);
1230 while ((val = cgraph_node_map->get (n->profile_id))
1231 || !n->profile_id)
1232 {
1233 if (dump_enabled_p ())
1234 dump_printf_loc (MSG_MISSED_OPTIMIZATION, loc,
1235 "Local profile-id %i conflict"
1236 " with nodes %s %s\n",
1237 n->profile_id,
1238 n->dump_name (),
1239 (*val)->dump_name ());
1240 n->profile_id = (n->profile_id + 1) & 0x7fffffff;
1241 }
1242 }
1243 else if (!n->profile_id)
1244 {
1245 if (dump_enabled_p ())
1246 dump_printf_loc (MSG_MISSED_OPTIMIZATION, loc,
1247 "Node %s has no profile-id"
1248 " (profile feedback missing?)\n",
1249 n->dump_name ());
1250 continue;
1251 }
1252 else if ((val = cgraph_node_map->get (n->profile_id)))
1253 {
1254 if (dump_enabled_p ())
1255 dump_printf_loc (MSG_MISSED_OPTIMIZATION, loc,
1256 "Node %s has IP profile-id %i conflict. "
1257 "Giving up.\n",
1258 n->dump_name (), n->profile_id);
1259 *val = NULL;
1260 continue;
1261 }
1262 cgraph_node_map->put (n->profile_id, n);
1263 }
1264 }
1265
1266 /* Delete the CGRAPH_NODE_MAP. */
1267
1268 void
1269 del_node_map (void)
1270 {
1271 delete cgraph_node_map;
1272 }
1273
1274 /* Return the cgraph node for the function with the given PROFILE_ID. */
1275
1276 struct cgraph_node*
1277 find_func_by_profile_id (int profile_id)
1278 {
1279 cgraph_node **val = cgraph_node_map->get (profile_id);
1280 if (val)
1281 return *val;
1282 else
1283 return NULL;
1284 }
1285
1286 /* Do transformation
1287
1288 if (actual_callee_address == address_of_most_common_function/method)
1289 do direct call
1290 else
1291 old call
1292 */
1293
1294 gcall *
1295 gimple_ic (gcall *icall_stmt, struct cgraph_node *direct_call,
1296 profile_probability prob)
1297 {
1298 gcall *dcall_stmt;
1299 gassign *load_stmt;
1300 gcond *cond_stmt;
1301 tree tmp0, tmp1, tmp;
1302 basic_block cond_bb, dcall_bb, icall_bb, join_bb = NULL;
1303 edge e_cd, e_ci, e_di, e_dj = NULL, e_ij;
1304 gimple_stmt_iterator gsi;
1305 int lp_nr, dflags;
1306 edge e_eh, e;
1307 edge_iterator ei;
1308
1309 cond_bb = gimple_bb (icall_stmt);
1310 gsi = gsi_for_stmt (icall_stmt);
1311
1312 tmp0 = make_temp_ssa_name (ptr_type_node, NULL, "PROF");
1313 tmp1 = make_temp_ssa_name (ptr_type_node, NULL, "PROF");
1314 tmp = unshare_expr (gimple_call_fn (icall_stmt));
1315 load_stmt = gimple_build_assign (tmp0, tmp);
1316 gsi_insert_before (&gsi, load_stmt, GSI_SAME_STMT);
1317
1318 tmp = fold_convert (ptr_type_node, build_addr (direct_call->decl));
1319 load_stmt = gimple_build_assign (tmp1, tmp);
1320 gsi_insert_before (&gsi, load_stmt, GSI_SAME_STMT);
1321
1322 cond_stmt = gimple_build_cond (EQ_EXPR, tmp1, tmp0, NULL_TREE, NULL_TREE);
1323 gsi_insert_before (&gsi, cond_stmt, GSI_SAME_STMT);
1324
1325 if (TREE_CODE (gimple_vdef (icall_stmt)) == SSA_NAME)
1326 {
1327 unlink_stmt_vdef (icall_stmt);
1328 release_ssa_name (gimple_vdef (icall_stmt));
1329 }
1330 gimple_set_vdef (icall_stmt, NULL_TREE);
1331 gimple_set_vuse (icall_stmt, NULL_TREE);
1332 update_stmt (icall_stmt);
1333 dcall_stmt = as_a <gcall *> (gimple_copy (icall_stmt));
1334 gimple_call_set_fndecl (dcall_stmt, direct_call->decl);
1335 dflags = flags_from_decl_or_type (direct_call->decl);
1336 if ((dflags & ECF_NORETURN) != 0
1337 && should_remove_lhs_p (gimple_call_lhs (dcall_stmt)))
1338 gimple_call_set_lhs (dcall_stmt, NULL_TREE);
1339 gsi_insert_before (&gsi, dcall_stmt, GSI_SAME_STMT);
1340
1341 /* Fix CFG. */
1342 /* Edge e_cd connects cond_bb to dcall_bb, etc; note the first letters. */
1343 e_cd = split_block (cond_bb, cond_stmt);
1344 dcall_bb = e_cd->dest;
1345 dcall_bb->count = cond_bb->count.apply_probability (prob);
1346
1347 e_di = split_block (dcall_bb, dcall_stmt);
1348 icall_bb = e_di->dest;
1349 icall_bb->count = cond_bb->count - dcall_bb->count;
1350
1351 /* Do not disturb existing EH edges from the indirect call. */
1352 if (!stmt_ends_bb_p (icall_stmt))
1353 e_ij = split_block (icall_bb, icall_stmt);
1354 else
1355 {
1356 e_ij = find_fallthru_edge (icall_bb->succs);
1357 /* The indirect call might be noreturn. */
1358 if (e_ij != NULL)
1359 {
1360 e_ij->probability = profile_probability::always ();
1361 e_ij = single_pred_edge (split_edge (e_ij));
1362 }
1363 }
1364 if (e_ij != NULL)
1365 {
1366 join_bb = e_ij->dest;
1367 join_bb->count = cond_bb->count;
1368 }
1369
1370 e_cd->flags = (e_cd->flags & ~EDGE_FALLTHRU) | EDGE_TRUE_VALUE;
1371 e_cd->probability = prob;
1372
1373 e_ci = make_edge (cond_bb, icall_bb, EDGE_FALSE_VALUE);
1374 e_ci->probability = prob.invert ();
1375
1376 remove_edge (e_di);
1377
1378 if (e_ij != NULL)
1379 {
1380 if ((dflags & ECF_NORETURN) == 0)
1381 {
1382 e_dj = make_edge (dcall_bb, join_bb, EDGE_FALLTHRU);
1383 e_dj->probability = profile_probability::always ();
1384 }
1385 e_ij->probability = profile_probability::always ();
1386 }
1387
1388 /* Insert PHI node for the call result if necessary. */
1389 if (gimple_call_lhs (icall_stmt)
1390 && TREE_CODE (gimple_call_lhs (icall_stmt)) == SSA_NAME
1391 && (dflags & ECF_NORETURN) == 0)
1392 {
1393 tree result = gimple_call_lhs (icall_stmt);
1394 gphi *phi = create_phi_node (result, join_bb);
1395 gimple_call_set_lhs (icall_stmt,
1396 duplicate_ssa_name (result, icall_stmt));
1397 add_phi_arg (phi, gimple_call_lhs (icall_stmt), e_ij, UNKNOWN_LOCATION);
1398 gimple_call_set_lhs (dcall_stmt,
1399 duplicate_ssa_name (result, dcall_stmt));
1400 add_phi_arg (phi, gimple_call_lhs (dcall_stmt), e_dj, UNKNOWN_LOCATION);
1401 }
1402
1403 /* Build an EH edge for the direct call if necessary. */
1404 lp_nr = lookup_stmt_eh_lp (icall_stmt);
1405 if (lp_nr > 0 && stmt_could_throw_p (cfun, dcall_stmt))
1406 {
1407 add_stmt_to_eh_lp (dcall_stmt, lp_nr);
1408 }
1409
1410 FOR_EACH_EDGE (e_eh, ei, icall_bb->succs)
1411 if (e_eh->flags & (EDGE_EH | EDGE_ABNORMAL))
1412 {
1413 e = make_edge (dcall_bb, e_eh->dest, e_eh->flags);
1414 e->probability = e_eh->probability;
1415 for (gphi_iterator psi = gsi_start_phis (e_eh->dest);
1416 !gsi_end_p (psi); gsi_next (&psi))
1417 {
1418 gphi *phi = psi.phi ();
1419 SET_USE (PHI_ARG_DEF_PTR_FROM_EDGE (phi, e),
1420 PHI_ARG_DEF_FROM_EDGE (phi, e_eh));
1421 }
1422 }
1423 if (!stmt_could_throw_p (cfun, dcall_stmt))
1424 gimple_purge_dead_eh_edges (dcall_bb);
1425 return dcall_stmt;
1426 }
1427
1428 /* Dump info about indirect call profile. */
1429
1430 static void
1431 dump_ic_profile (gimple_stmt_iterator *gsi)
1432 {
1433 gcall *stmt;
1434 histogram_value histogram;
1435 gcov_type val, count, all;
1436 struct cgraph_node *direct_call;
1437
1438 stmt = dyn_cast <gcall *> (gsi_stmt (*gsi));
1439 if (!stmt)
1440 return;
1441
1442 if (gimple_call_fndecl (stmt) != NULL_TREE)
1443 return;
1444
1445 if (gimple_call_internal_p (stmt))
1446 return;
1447
1448 histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_INDIR_CALL);
1449 if (!histogram)
1450 return;
1451
1452 count = 0;
1453 all = histogram->hvalue.counters[0];
1454
1455 for (unsigned j = 0; j < GCOV_TOPN_MAXIMUM_TRACKED_VALUES; j++)
1456 {
1457 if (!get_nth_most_common_value (NULL, "indirect call", histogram, &val,
1458 &count, &all, j))
1459 return;
1460 if (!count)
1461 continue;
1462
1463 direct_call = find_func_by_profile_id ((int) val);
1464
1465 if (direct_call == NULL)
1466 dump_printf_loc (
1467 MSG_MISSED_OPTIMIZATION, stmt,
1468 "Indirect call -> direct call from other "
1469 "module %T=> %i (will resolve by ipa-profile only with LTO)\n",
1470 gimple_call_fn (stmt), (int) val);
1471 else
1472 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
1473 "Indirect call -> direct call "
1474 "%T => %T (will resolve by ipa-profile)\n",
1475 gimple_call_fn (stmt), direct_call->decl);
1476 dump_printf_loc (MSG_NOTE, stmt,
1477 "hist->count %" PRId64 " hist->all %" PRId64 "\n",
1478 count, all);
1479 }
1480 }
1481
1482 /* Return true if the stringop CALL shall be profiled.  SIZE_ARG is
1483 set to the argument index for the size of the string operation. */
1484
1485 static bool
1486 interesting_stringop_to_profile_p (gcall *call, int *size_arg)
1487 {
1488 enum built_in_function fcode;
1489
1490 fcode = DECL_FUNCTION_CODE (gimple_call_fndecl (call));
1491 switch (fcode)
1492 {
1493 case BUILT_IN_MEMCPY:
1494 case BUILT_IN_MEMPCPY:
1495 case BUILT_IN_MEMMOVE:
1496 *size_arg = 2;
1497 return validate_gimple_arglist (call, POINTER_TYPE, POINTER_TYPE,
1498 INTEGER_TYPE, VOID_TYPE);
1499 case BUILT_IN_MEMSET:
1500 *size_arg = 2;
1501 return validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE,
1502 INTEGER_TYPE, VOID_TYPE);
1503 case BUILT_IN_BZERO:
1504 *size_arg = 1;
1505 return validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE,
1506 VOID_TYPE);
1507 default:
1508 return false;
1509 }
1510 }
1511
1512 /* Convert stringop (..., vcall_size)
1513 into
1514 if (vcall_size == icall_size)
1515 stringop (..., icall_size);
1516 else
1517 stringop (..., vcall_size);
1518 assuming we'll propagate a true constant into ICALL_SIZE later. */
1519
1520 static void
1521 gimple_stringop_fixed_value (gcall *vcall_stmt, tree icall_size, profile_probability prob,
1522 gcov_type count, gcov_type all)
1523 {
1524 gassign *tmp_stmt;
1525 gcond *cond_stmt;
1526 gcall *icall_stmt;
1527 tree tmp0, tmp1, vcall_size, optype;
1528 basic_block cond_bb, icall_bb, vcall_bb, join_bb;
1529 edge e_ci, e_cv, e_iv, e_ij, e_vj;
1530 gimple_stmt_iterator gsi;
1531 int size_arg;
1532
1533 if (!interesting_stringop_to_profile_p (vcall_stmt, &size_arg))
1534 gcc_unreachable ();
1535
1536 cond_bb = gimple_bb (vcall_stmt);
1537 gsi = gsi_for_stmt (vcall_stmt);
1538
1539 vcall_size = gimple_call_arg (vcall_stmt, size_arg);
1540 optype = TREE_TYPE (vcall_size);
1541
1542 tmp0 = make_temp_ssa_name (optype, NULL, "PROF");
1543 tmp1 = make_temp_ssa_name (optype, NULL, "PROF");
1544 tmp_stmt = gimple_build_assign (tmp0, fold_convert (optype, icall_size));
1545 gsi_insert_before (&gsi, tmp_stmt, GSI_SAME_STMT);
1546
1547 tmp_stmt = gimple_build_assign (tmp1, vcall_size);
1548 gsi_insert_before (&gsi, tmp_stmt, GSI_SAME_STMT);
1549
1550 cond_stmt = gimple_build_cond (EQ_EXPR, tmp1, tmp0, NULL_TREE, NULL_TREE);
1551 gsi_insert_before (&gsi, cond_stmt, GSI_SAME_STMT);
1552
1553 if (TREE_CODE (gimple_vdef (vcall_stmt)) == SSA_NAME)
1554 {
1555 unlink_stmt_vdef (vcall_stmt);
1556 release_ssa_name (gimple_vdef (vcall_stmt));
1557 }
1558 gimple_set_vdef (vcall_stmt, NULL);
1559 gimple_set_vuse (vcall_stmt, NULL);
1560 update_stmt (vcall_stmt);
1561 icall_stmt = as_a <gcall *> (gimple_copy (vcall_stmt));
1562 gimple_call_set_arg (icall_stmt, size_arg,
1563 fold_convert (optype, icall_size));
1564 gsi_insert_before (&gsi, icall_stmt, GSI_SAME_STMT);
1565
1566 /* Fix CFG. */
1567 /* Edge e_ci connects cond_bb to icall_bb, etc. */
1568 e_ci = split_block (cond_bb, cond_stmt);
1569 icall_bb = e_ci->dest;
1570 icall_bb->count = profile_count::from_gcov_type (count);
1571
1572 e_iv = split_block (icall_bb, icall_stmt);
1573 vcall_bb = e_iv->dest;
1574 vcall_bb->count = profile_count::from_gcov_type (all - count);
1575
1576 e_vj = split_block (vcall_bb, vcall_stmt);
1577 join_bb = e_vj->dest;
1578 join_bb->count = profile_count::from_gcov_type (all);
1579
1580 e_ci->flags = (e_ci->flags & ~EDGE_FALLTHRU) | EDGE_TRUE_VALUE;
1581 e_ci->probability = prob;
1582
1583 e_cv = make_edge (cond_bb, vcall_bb, EDGE_FALSE_VALUE);
1584 e_cv->probability = prob.invert ();
1585
1586 remove_edge (e_iv);
1587
1588 e_ij = make_edge (icall_bb, join_bb, EDGE_FALLTHRU);
1589 e_ij->probability = profile_probability::always ();
1590
1591 e_vj->probability = profile_probability::always ();
1592
1593 /* Insert PHI node for the call result if necessary. */
1594 if (gimple_call_lhs (vcall_stmt)
1595 && TREE_CODE (gimple_call_lhs (vcall_stmt)) == SSA_NAME)
1596 {
1597 tree result = gimple_call_lhs (vcall_stmt);
1598 gphi *phi = create_phi_node (result, join_bb);
1599 gimple_call_set_lhs (vcall_stmt,
1600 duplicate_ssa_name (result, vcall_stmt));
1601 add_phi_arg (phi, gimple_call_lhs (vcall_stmt), e_vj, UNKNOWN_LOCATION);
1602 gimple_call_set_lhs (icall_stmt,
1603 duplicate_ssa_name (result, icall_stmt));
1604 add_phi_arg (phi, gimple_call_lhs (icall_stmt), e_ij, UNKNOWN_LOCATION);
1605 }
1606
1607 /* Because these are all string op builtins, they're all nothrow. */
1608 gcc_assert (!stmt_could_throw_p (cfun, vcall_stmt));
1609 gcc_assert (!stmt_could_throw_p (cfun, icall_stmt));
1610 }
1611
1612 /* Transform the string operation in the statement pointed-to by GSI into a
1613 version specialized for the most common block size, if profitable. */
1614
1615 static bool
1616 gimple_stringops_transform (gimple_stmt_iterator *gsi)
1617 {
1618 gcall *stmt;
1619 tree blck_size;
1620 enum built_in_function fcode;
1621 histogram_value histogram;
1622 gcov_type count, all, val;
1623 tree dest, src;
1624 unsigned int dest_align, src_align;
1625 profile_probability prob;
1626 tree tree_val;
1627 int size_arg;
1628
1629 stmt = dyn_cast <gcall *> (gsi_stmt (*gsi));
1630 if (!stmt)
1631 return false;
1632
1633 if (!gimple_call_builtin_p (gsi_stmt (*gsi), BUILT_IN_NORMAL))
1634 return false;
1635
1636 if (!interesting_stringop_to_profile_p (stmt, &size_arg))
1637 return false;
1638
1639 blck_size = gimple_call_arg (stmt, size_arg);
1640 if (TREE_CODE (blck_size) == INTEGER_CST)
1641 return false;
1642
1643 histogram = gimple_histogram_value_of_type (cfun, stmt,
1644 HIST_TYPE_TOPN_VALUES);
1645 if (!histogram)
1646 return false;
1647
1648 if (!get_nth_most_common_value (stmt, "stringops", histogram, &val, &count,
1649 &all))
1650 return false;
1651
1652 gimple_remove_histogram_value (cfun, stmt, histogram);
1653
1654 /* We require that count is at least half of all. */
1655 if (2 * count < all || optimize_bb_for_size_p (gimple_bb (stmt)))
1656 return false;
1657 if (check_counter (stmt, "value", &count, &all, gimple_bb (stmt)->count))
1658 return false;
1659 if (all > 0)
1660 prob = profile_probability::probability_in_gcov_type (count, all);
1661 else
1662 prob = profile_probability::never ();
1663
1664 dest = gimple_call_arg (stmt, 0);
1665 dest_align = get_pointer_alignment (dest);
1666 fcode = DECL_FUNCTION_CODE (gimple_call_fndecl (stmt));
1667 switch (fcode)
1668 {
1669 case BUILT_IN_MEMCPY:
1670 case BUILT_IN_MEMPCPY:
1671 case BUILT_IN_MEMMOVE:
1672 src = gimple_call_arg (stmt, 1);
1673 src_align = get_pointer_alignment (src);
1674 if (!can_move_by_pieces (val, MIN (dest_align, src_align)))
1675 return false;
1676 break;
1677 case BUILT_IN_MEMSET:
1678 if (!can_store_by_pieces (val, builtin_memset_read_str,
1679 gimple_call_arg (stmt, 1),
1680 dest_align, true))
1681 return false;
1682 break;
1683 case BUILT_IN_BZERO:
1684 if (!can_store_by_pieces (val, builtin_memset_read_str,
1685 integer_zero_node,
1686 dest_align, true))
1687 return false;
1688 break;
1689 default:
1690 gcc_unreachable ();
1691 }
1692
1693 if (sizeof (gcov_type) == sizeof (HOST_WIDE_INT))
1694 tree_val = build_int_cst (get_gcov_type (), val);
1695 else
1696 {
1697 HOST_WIDE_INT a[2];
1698 a[0] = (unsigned HOST_WIDE_INT) val;
1699 a[1] = val >> (HOST_BITS_PER_WIDE_INT - 1) >> 1;
1700
1701 tree_val = wide_int_to_tree (get_gcov_type (), wide_int::from_array (a, 2,
1702 TYPE_PRECISION (get_gcov_type ()), false));
1703 }
1704
1705 if (dump_enabled_p ())
1706 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
1707 "Transformation done: single value %i stringop for %s\n",
1708 (int)val, built_in_names[(int)fcode]);
1709
1710 gimple_stringop_fixed_value (stmt, tree_val, prob, count, all);
1711
1712 return true;
1713 }
1714
1715 void
1716 stringop_block_profile (gimple *stmt, unsigned int *expected_align,
1717 HOST_WIDE_INT *expected_size)
1718 {
1719 histogram_value histogram;
1720 histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_AVERAGE);
1721
1722 if (!histogram)
1723 *expected_size = -1;
1724 else if (!histogram->hvalue.counters[1])
1725 {
1726 *expected_size = -1;
1727 gimple_remove_histogram_value (cfun, stmt, histogram);
1728 }
1729 else
1730 {
1731 gcov_type size;
1732 size = ((histogram->hvalue.counters[0]
1733 + histogram->hvalue.counters[1] / 2)
1734 / histogram->hvalue.counters[1]);
1735 /* Even if we could hold a bigger value in SIZE, INT_MAX
1736 is a safe "infinity" for code generation strategies. */
1737 if (size > INT_MAX)
1738 size = INT_MAX;
1739 *expected_size = size;
1740 gimple_remove_histogram_value (cfun, stmt, histogram);
1741 }
1742
1743 histogram = gimple_histogram_value_of_type (cfun, stmt, HIST_TYPE_IOR);
1744
1745 if (!histogram)
1746 *expected_align = 0;
1747 else if (!histogram->hvalue.counters[0])
1748 {
1749 gimple_remove_histogram_value (cfun, stmt, histogram);
1750 *expected_align = 0;
1751 }
1752 else
1753 {
1754 gcov_type count;
1755 unsigned int alignment;
1756
1757 count = histogram->hvalue.counters[0];
1758 alignment = 1;
1759 while (!(count & alignment)
1760 && (alignment <= UINT_MAX / 2 / BITS_PER_UNIT))
1761 alignment <<= 1;
1762 *expected_align = alignment * BITS_PER_UNIT;
1763 gimple_remove_histogram_value (cfun, stmt, histogram);
1764 }
1765 }
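/* A note on the alignment computation above (illustrative example): the
   IOR histogram accumulates the bitwise OR of all profiled destination
   addresses, so its lowest set bit bounds their common alignment.  If the
   accumulated value were 0x1058, the lowest set bit is 8, so every
   address was a multiple of 8 bytes and *expected_align becomes
   8 * BITS_PER_UNIT.  */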
1766
1767 \f
1768 /* Find values inside STMT for which we want to measure histograms for
1769 division/modulo optimization. */
1770
1771 static void
1772 gimple_divmod_values_to_profile (gimple *stmt, histogram_values *values)
1773 {
1774 tree lhs, divisor, op0, type;
1775 histogram_value hist;
1776
1777 if (gimple_code (stmt) != GIMPLE_ASSIGN)
1778 return;
1779
1780 lhs = gimple_assign_lhs (stmt);
1781 type = TREE_TYPE (lhs);
1782 if (!INTEGRAL_TYPE_P (type))
1783 return;
1784
1785 switch (gimple_assign_rhs_code (stmt))
1786 {
1787 case TRUNC_DIV_EXPR:
1788 case TRUNC_MOD_EXPR:
1789 divisor = gimple_assign_rhs2 (stmt);
1790 op0 = gimple_assign_rhs1 (stmt);
1791
1792 if (TREE_CODE (divisor) == SSA_NAME)
1793 /* Check for the case where the divisor is the same value most
1794 of the time. */
1795 values->safe_push (gimple_alloc_histogram_value (cfun,
1796 HIST_TYPE_TOPN_VALUES,
1797 stmt, divisor));
1798
1799 /* For mod, check whether it is not often a noop (or replaceable by
1800 a few subtractions). */
1801 if (gimple_assign_rhs_code (stmt) == TRUNC_MOD_EXPR
1802 && TYPE_UNSIGNED (type)
1803 && TREE_CODE (divisor) == SSA_NAME)
1804 {
1805 tree val;
1806 /* Check for a special case where the divisor is a power of 2. */
1807 values->safe_push (gimple_alloc_histogram_value (cfun,
1808 HIST_TYPE_POW2,
1809 stmt, divisor));
1810 val = build2 (TRUNC_DIV_EXPR, type, op0, divisor);
1811 hist = gimple_alloc_histogram_value (cfun, HIST_TYPE_INTERVAL,
1812 stmt, val);
1813 hist->hdata.intvl.int_start = 0;
1814 hist->hdata.intvl.steps = 2;
1815 values->safe_push (hist);
1816 }
1817 return;
1818
1819 default:
1820 return;
1821 }
1822 }
1823
1824 /* Find calls inside STMT for which we want to measure histograms for
1825 indirect/virtual call optimization. */
1826
1827 static void
1828 gimple_indirect_call_to_profile (gimple *stmt, histogram_values *values)
1829 {
1830 tree callee;
1831
1832 if (gimple_code (stmt) != GIMPLE_CALL
1833 || gimple_call_internal_p (stmt)
1834 || gimple_call_fndecl (stmt) != NULL_TREE)
1835 return;
1836
1837 callee = gimple_call_fn (stmt);
1838 histogram_value v = gimple_alloc_histogram_value (cfun, HIST_TYPE_INDIR_CALL,
1839 stmt, callee);
1840 values->safe_push (v);
1841
1842 return;
1843 }
1844
1845 /* Find values inside STMT for which we want to measure histograms for
1846 string operations. */
1847
1848 static void
1849 gimple_stringops_values_to_profile (gimple *gs, histogram_values *values)
1850 {
1851 gcall *stmt;
1852 tree blck_size;
1853 tree dest;
1854 int size_arg;
1855
1856 stmt = dyn_cast <gcall *> (gs);
1857 if (!stmt)
1858 return;
1859
1860 if (!gimple_call_builtin_p (gs, BUILT_IN_NORMAL))
1861 return;
1862
1863 if (!interesting_stringop_to_profile_p (stmt, &size_arg))
1864 return;
1865
1866 dest = gimple_call_arg (stmt, 0);
1867 blck_size = gimple_call_arg (stmt, size_arg);
1868
1869 if (TREE_CODE (blck_size) != INTEGER_CST)
1870 {
1871 values->safe_push (gimple_alloc_histogram_value (cfun,
1872 HIST_TYPE_TOPN_VALUES,
1873 stmt, blck_size));
1874 values->safe_push (gimple_alloc_histogram_value (cfun, HIST_TYPE_AVERAGE,
1875 stmt, blck_size));
1876 }
1877
1878 if (TREE_CODE (blck_size) != INTEGER_CST)
1879 values->safe_push (gimple_alloc_histogram_value (cfun, HIST_TYPE_IOR,
1880 stmt, dest));
1881 }
1882
1883 /* Find values inside STMT for which we want to measure histograms and add
1884 them to the list VALUES. */
1885
1886 static void
1887 gimple_values_to_profile (gimple *stmt, histogram_values *values)
1888 {
1889 gimple_divmod_values_to_profile (stmt, values);
1890 gimple_stringops_values_to_profile (stmt, values);
1891 gimple_indirect_call_to_profile (stmt, values);
1892 }
1893
1894 void
1895 gimple_find_values_to_profile (histogram_values *values)
1896 {
1897 basic_block bb;
1898 gimple_stmt_iterator gsi;
1899 unsigned i;
1900 histogram_value hist = NULL;
1901 values->create (0);
1902
1903 FOR_EACH_BB_FN (bb, cfun)
1904 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
1905 gimple_values_to_profile (gsi_stmt (gsi), values);
1906
1907 values->safe_push (gimple_alloc_histogram_value (cfun,
1908 HIST_TYPE_TIME_PROFILE));
1909
1910 FOR_EACH_VEC_ELT (*values, i, hist)
1911 {
1912 switch (hist->type)
1913 {
1914 case HIST_TYPE_INTERVAL:
1915 hist->n_counters = hist->hdata.intvl.steps + 2;
1916 break;
1917
1918 case HIST_TYPE_POW2:
1919 hist->n_counters = 2;
1920 break;
1921
1922 case HIST_TYPE_TOPN_VALUES:
1923 case HIST_TYPE_INDIR_CALL:
1924 hist->n_counters = GCOV_TOPN_MEM_COUNTERS;
1925 break;
1926
1927 case HIST_TYPE_TIME_PROFILE:
1928 hist->n_counters = 1;
1929 break;
1930
1931 case HIST_TYPE_AVERAGE:
1932 hist->n_counters = 2;
1933 break;
1934
1935 case HIST_TYPE_IOR:
1936 hist->n_counters = 1;
1937 break;
1938
1939 default:
1940 gcc_unreachable ();
1941 }
1942 if (dump_file && hist->hvalue.stmt != NULL)
1943 {
1944 fprintf (dump_file, "Stmt ");
1945 print_gimple_stmt (dump_file, hist->hvalue.stmt, 0, TDF_SLIM);
1946 dump_histogram_value (dump_file, hist);
1947 }
1948 }
1949 }