
Commit 0bbff9e

robherring authored and willdeacon committed
perf/arm_pmuv3: Add PMUv3.9 per counter EL0 access control
Armv8.9/9.4 PMUv3.9 adds per-counter EL0 access controls. Per-counter
access is enabled with the UEN bit in the PMUSERENR_EL1 register, and
individual counters are enabled/disabled in the PMUACR_EL1 register.
When UEN is set, the CR/ER bits control EL0 write access and must be
set to disable write access.

With these access controls, the clearing of unused counters can be
skipped.

KVM also configures PMUSERENR_EL1 in order to trap to EL2. UEN does not
need to be set for it, since only PMUv3.5 is exposed to guests.

Signed-off-by: Rob Herring (Arm) <[email protected]>
Link: https://lore.kernel.org/r/[email protected]
Signed-off-by: Will Deacon <[email protected]>
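From user space, the point of these controls is that a self-monitoring task can read its own counters directly with mrs instead of a read() syscall. The sketch below is a minimal illustration, not part of this commit: it assumes the perf self-monitoring interface (perf_event_open() plus the mmapped perf_event_mmap_page), assumes direct access has been permitted (on arm64 this is typically gated by the kernel.perf_user_access sysctl), reads only the cycle counter, and skips the seqlock-based read protocol a robust reader should follow. The open_cycles_event() helper is hypothetical.

#include <linux/perf_event.h>
#include <stdint.h>
#include <stdio.h>
#include <sys/mman.h>
#include <sys/syscall.h>
#include <unistd.h>

/* Hypothetical helper: open a CPU-cycles event on the current task. */
static int open_cycles_event(void)
{
	struct perf_event_attr attr = {
		.type = PERF_TYPE_HARDWARE,
		.size = sizeof(attr),
		.config = PERF_COUNT_HW_CPU_CYCLES,
		.exclude_kernel = 1,
	};

	return syscall(SYS_perf_event_open, &attr, 0, -1, -1, 0);
}

int main(void)
{
	struct perf_event_mmap_page *pc;
	uint64_t cycles;
	int fd = open_cycles_event();

	if (fd < 0)
		return 1;

	pc = mmap(NULL, sysconf(_SC_PAGESIZE), PROT_READ, MAP_SHARED, fd, 0);
	if (pc == MAP_FAILED || !pc->cap_user_rdpmc) {
		fprintf(stderr, "direct EL0 counter access not granted\n");
		return 1;
	}

	/*
	 * Simplified demo read of the cycle counter; a robust reader would
	 * follow the pc->lock/pc->index/pc->offset protocol documented for
	 * perf_event_mmap_page rather than assuming counter placement.
	 */
	asm volatile("mrs %0, pmccntr_el0" : "=r"(cycles));
	printf("pmccntr_el0 = %llu\n", (unsigned long long)cycles);

	return 0;
}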
1 parent 759b5fc commit 0bbff9e

File tree

5 files changed, 44 insertions(+), 10 deletions(-)

arch/arm/include/asm/arm_pmuv3.h

Lines changed: 6 additions & 0 deletions

@@ -231,6 +231,7 @@ static inline void kvm_vcpu_pmu_resync_el0(void) {}
 #define ARMV8_PMU_DFR_VER_V3P1	0x4
 #define ARMV8_PMU_DFR_VER_V3P4	0x5
 #define ARMV8_PMU_DFR_VER_V3P5	0x6
+#define ARMV8_PMU_DFR_VER_V3P9	0x9
 #define ARMV8_PMU_DFR_VER_IMP_DEF	0xF
 
 static inline bool pmuv3_implemented(int pmuver)
@@ -249,6 +250,11 @@ static inline bool is_pmuv3p5(int pmuver)
 	return pmuver >= ARMV8_PMU_DFR_VER_V3P5;
 }
 
+static inline bool is_pmuv3p9(int pmuver)
+{
+	return pmuver >= ARMV8_PMU_DFR_VER_V3P9;
+}
+
 static inline u64 read_pmceid0(void)
 {
 	u64 val = read_sysreg(PMCEID0);

arch/arm64/include/asm/arm_pmuv3.h

Lines changed: 10 additions & 0 deletions

@@ -152,6 +152,11 @@ static inline void write_pmuserenr(u32 val)
 	write_sysreg(val, pmuserenr_el0);
 }
 
+static inline void write_pmuacr(u64 val)
+{
+	write_sysreg_s(val, SYS_PMUACR_EL1);
+}
+
 static inline u64 read_pmceid0(void)
 {
 	return read_sysreg(pmceid0_el0);
@@ -178,4 +183,9 @@ static inline bool is_pmuv3p5(int pmuver)
 	return pmuver >= ID_AA64DFR0_EL1_PMUVer_V3P5;
 }
 
+static inline bool is_pmuv3p9(int pmuver)
+{
+	return pmuver >= ID_AA64DFR0_EL1_PMUVer_V3P9;
+}
+
 #endif

arch/arm64/tools/sysreg

Lines changed: 8 additions & 0 deletions

@@ -1238,6 +1238,7 @@ UnsignedEnum 11:8 PMUVer
 	0b0110	V3P5
 	0b0111	V3P7
 	0b1000	V3P8
+	0b1001	V3P9
 	0b1111	IMP_DEF
 EndEnum
 UnsignedEnum 7:4 TraceVer
@@ -2178,6 +2179,13 @@ Field 4 P
 Field	3:0	ALIGN
 EndSysreg
 
+Sysreg	PMUACR_EL1	3	0	9	14	4
+Res0	63:33
+Field	32	F0
+Field	31	C
+Field	30:0	P
+EndSysreg
+
 Sysreg	PMSELR_EL0	3	3	9	12	5
 Res0	63:5
 Field	4:0	SEL
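A note on the layout just added: PMUACR_EL1's P, C and F0 fields line up with the counter indices the PMU driver already uses, which is why the driver change below can build the grant mask with a plain BIT(i). The following sketch spells out that assumption; the index values 31 and 32 for the cycle and instruction counters are the kernel's usual convention, stated here as an assumption rather than taken from this diff.

#include <stdint.h>

/*
 * Sketch only: PMUACR_EL1 bit positions, assuming the driver indexes the
 * cycle counter as 31 and the fixed instruction counter as 32.
 */
#define PMUACR_P(n)	(1ULL << (n))	/* P[30:0]: event counter n */
#define PMUACR_C	(1ULL << 31)	/* C: cycle counter */
#define PMUACR_F0	(1ULL << 32)	/* F0: instruction counter */

/* Index and bit position coincide, so granting a counter is just 1 << idx. */
static inline uint64_t pmuacr_grant(unsigned int idx)
{
	return 1ULL << idx;
}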

drivers/perf/arm_pmuv3.c

Lines changed: 19 additions & 10 deletions

@@ -770,18 +770,27 @@ static void armv8pmu_enable_user_access(struct arm_pmu *cpu_pmu)
 	int i;
 	struct pmu_hw_events *cpuc = this_cpu_ptr(cpu_pmu->hw_events);
 
-	/* Clear any unused counters to avoid leaking their contents */
-	for_each_andnot_bit(i, cpu_pmu->cntr_mask, cpuc->used_mask,
-			    ARMPMU_MAX_HWEVENTS) {
-		if (i == ARMV8_PMU_CYCLE_IDX)
-			write_pmccntr(0);
-		else if (i == ARMV8_PMU_INSTR_IDX)
-			write_pmicntr(0);
-		else
-			armv8pmu_write_evcntr(i, 0);
+	if (is_pmuv3p9(cpu_pmu->pmuver)) {
+		u64 mask = 0;
+		for_each_set_bit(i, cpuc->used_mask, ARMPMU_MAX_HWEVENTS) {
+			if (armv8pmu_event_has_user_read(cpuc->events[i]))
+				mask |= BIT(i);
+		}
+		write_pmuacr(mask);
+	} else {
+		/* Clear any unused counters to avoid leaking their contents */
+		for_each_andnot_bit(i, cpu_pmu->cntr_mask, cpuc->used_mask,
+				    ARMPMU_MAX_HWEVENTS) {
+			if (i == ARMV8_PMU_CYCLE_IDX)
+				write_pmccntr(0);
+			else if (i == ARMV8_PMU_INSTR_IDX)
+				write_pmicntr(0);
+			else
+				armv8pmu_write_evcntr(i, 0);
+		}
 	}
 
-	update_pmuserenr(ARMV8_PMU_USERENR_ER | ARMV8_PMU_USERENR_CR);
+	update_pmuserenr(ARMV8_PMU_USERENR_ER | ARMV8_PMU_USERENR_CR | ARMV8_PMU_USERENR_UEN);
 }
 
 static void armv8pmu_enable_event(struct perf_event *event)

include/linux/perf/arm_pmuv3.h

Lines changed: 1 addition & 0 deletions

@@ -257,6 +257,7 @@
 #define ARMV8_PMU_USERENR_SW	(1 << 1) /* PMSWINC can be written at EL0 */
 #define ARMV8_PMU_USERENR_CR	(1 << 2) /* Cycle counter can be read at EL0 */
 #define ARMV8_PMU_USERENR_ER	(1 << 3) /* Event counter can be read at EL0 */
+#define ARMV8_PMU_USERENR_UEN	(1 << 4) /* Fine grained per counter access at EL0 */
 /* Mask for writable bits */
 #define ARMV8_PMU_USERENR_MASK	(ARMV8_PMU_USERENR_EN | ARMV8_PMU_USERENR_SW | \
 				 ARMV8_PMU_USERENR_CR | ARMV8_PMU_USERENR_ER)
