perf_cgroup_match(event);
}
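+/*
+ * An event is in freq mode when it requests a target sample frequency
+ * (attr.freq set, with a non-zero attr.sample_freq) rather than a fixed
+ * sample period; the kernel then adjusts the period dynamically to hit
+ * that frequency.
+ */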
+static inline bool is_event_in_freq_mode(struct perf_event *event)
+{
+	return event->attr.freq && event->attr.sample_freq;
+}
+
static void
event_sched_out(struct perf_event *event, struct perf_event_context *ctx)
{
if (!is_software_event(event))
cpc->active_oncpu--;
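+/*
+ * ctx->nr_freq and epc->nr_freq count active events in freq mode,
+ * so the tick path knows whether any sample periods need adjusting.
+ */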
- if (event->attr.freq && event->attr.sample_freq) {
+ if (is_event_in_freq_mode(event)) {
ctx->nr_freq--;
epc->nr_freq--;
}
if (!is_software_event(event))
cpc->active_oncpu++;
- if (event->attr.freq && event->attr.sample_freq) {
+ if (is_event_in_freq_mode(event)) {
ctx->nr_freq++;
epc->nr_freq++;
}
if (hwc->interrupts == MAX_INTERRUPTS) {
hwc->interrupts = 0;
perf_log_throttle(event, 1);
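+/*
+ * Events in freq mode are restarted further down, once their
+ * sample period has been re-adjusted; only fixed-period events
+ * need an explicit restart after unthrottling.
+ */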
- if (!event->attr.freq || !event->attr.sample_freq)
+ if (!is_event_in_freq_mode(event))
event->pmu->start(event, 0);
}
- if (!event->attr.freq || !event->attr.sample_freq)
+ if (!is_event_in_freq_mode(event))
continue;
/*
hwc = &event->hw;
hwc->sample_period = attr->sample_period;
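+/*
+ * In freq mode, start with a period of 1; the frequency
+ * adjustment code scales it up to approach attr.sample_freq.
+ */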
- if (attr->freq && attr->sample_freq)
+ if (is_event_in_freq_mode(event))
hwc->sample_period = 1;
hwc->last_period = hwc->sample_period;