#define ARMV8_PMCR_D (1 << 3) /* CCNT counts every 64th cpu cycle */
#define ARMV8_PMCR_X (1 << 4) /* Export to ETM */
#define ARMV8_PMCR_DP (1 << 5) /* Disable CCNT if non-invasive debug*/
+#define ARMV8_PMCR_LC (1 << 6) /* Overflow on 64 bit cycle counter */
#define ARMV8_PMCR_N_SHIFT 11 /* Number of counters supported */
#define ARMV8_PMCR_N_MASK 0x1f
-#define ARMV8_PMCR_MASK 0x3f /* Mask for writable bits */
+#define ARMV8_PMCR_MASK 0x7f /* Mask for writable bits */
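The mask change is required, not cosmetic: armv8pmu_pmcr_write() ands its
argument with ARMV8_PMCR_MASK before writing PMCR_EL0, so with the old 0x3f
mask the new LC bit (bit 6) would be silently dropped. A minimal sketch of
that helper, paraphrased from the same file (details may vary by kernel
version):

static inline void armv8pmu_pmcr_write(u32 val)
{
	/* Keep only the architecturally writable PMCR bits */
	val &= ARMV8_PMCR_MASK;
	isb();
	asm volatile("msr pmcr_el0, %0" :: "r" (val));
}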
if (!armv8pmu_counter_valid(cpu_pmu, idx))
pr_err("CPU%u writing wrong counter %d\n",
smp_processor_id(), idx);
- else if (idx == ARMV8_IDX_CYCLE_COUNTER)
- asm volatile("msr pmccntr_el0, %0" :: "r" (value));
- else if (armv8pmu_select_counter(idx) == idx)
+ else if (idx == ARMV8_IDX_CYCLE_COUNTER) {
+ /*
+ * This is a 64 bit counter, but perf only counts with the lower
+ * 32 bits. Prime the upper 32 bits so the overflow interrupt
+ * fires as soon as the lower half wraps.
+ */
+ u64 value64 = 0xffffffff00000000ULL | value;
+
+ asm volatile("msr pmccntr_el0, %0" :: "r" (value64));
+ } else if (armv8pmu_select_counter(idx) == idx)
asm volatile("msr pmxevcntr_el0, %0" :: "r" (value));
}
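The priming preserves perf's period arithmetic: starting from
0xffffffff00000000 | value, the 64 bit counter overflows after exactly
2^32 - value increments, which is the same point at which a bare 32 bit
counter loaded with value would wrap, so the LC overflow interrupt fires
on the schedule perf asked for. A standalone illustration of the
arithmetic (hypothetical user-space demo, not part of the patch):

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	uint32_t value = 0xfffffff0;	/* e.g. a perf period remainder */
	uint64_t value64 = 0xffffffff00000000ULL | value;

	/* Increments until the 64 bit counter wraps (mod 2^64)... */
	uint64_t until64 = (uint64_t)0 - value64;
	/* ...match the increments until a 32 bit counter would wrap. */
	uint32_t until32 = (uint32_t)0 - value;

	printf("%llu == %u\n", (unsigned long long)until64, until32);
	return 0;	/* prints: 16 == 16 */
}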
armv8pmu_disable_intens(idx);
}
- /* Initialize & Reset PMNC: C and P bits. */
- armv8pmu_pmcr_write(ARMV8_PMCR_P | ARMV8_PMCR_C);
+ /*
+ * Initialize & Reset PMNC: C and P bits. Also set LC to request
+ * the overflow interrupt for the 64 bit cycle counter, relying on
+ * armv8pmu_write_counter() to prime the upper 32 bits.
+ */
+ armv8pmu_pmcr_write(ARMV8_PMCR_P | ARMV8_PMCR_C | ARMV8_PMCR_LC);
}
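On the read side nothing needs to change: the counter read helper in this
file returns a u32, so the primed upper half of PMCCNTR_EL0 is truncated
away and never reaches perf. A sketch of that path, again paraphrased from
the same era of the file (assumed context, not part of this patch):

static inline u32 armv8pmu_read_counter(int idx)
{
	u32 value = 0;

	if (!armv8pmu_counter_valid(cpu_pmu, idx))
		pr_err("CPU%u reading wrong counter %d\n",
			smp_processor_id(), idx);
	else if (idx == ARMV8_IDX_CYCLE_COUNTER)
		/* u32 destination keeps only the low 32 bits of PMCCNTR_EL0 */
		asm volatile("mrs %0, pmccntr_el0" : "=r" (value));
	else if (armv8pmu_select_counter(idx) == idx)
		asm volatile("mrs %0, pmxevcntr_el0" : "=r" (value));

	return value;
}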
static int armv8_pmuv3_map_event(struct perf_event *event)