
Commit 9651f00

Merge branch 'for-next/perf' into for-next/core
* for-next/perf: (24 commits)
  KVM: arm64: Ensure CPU PMU probes before pKVM host de-privilege
  drivers/perf: hisi: add NULL check for name
  drivers/perf: hisi: Remove redundant initialized of pmu->name
  perf/arm-cmn: Fix port detection for CMN-700
  arm64: pmuv3: dynamically map PERF_COUNT_HW_BRANCH_INSTRUCTIONS
  perf/arm-cmn: Validate cycles events fully
  Revert "ARM: mach-virt: Select PMUv3 driver by default"
  drivers/perf: apple_m1: Add Apple M2 support
  dt-bindings: arm-pmu: Add PMU compatible strings for Apple M2 cores
  perf: arm_cspmu: Fix variable dereference warning
  perf/amlogic: Fix config1/config2 parsing issue
  drivers/perf: Use devm_platform_get_and_ioremap_resource()
  kbuild, drivers/perf: remove MODULE_LICENSE in non-modules
  perf: qcom: Use devm_platform_get_and_ioremap_resource()
  perf: arm: Use devm_platform_get_and_ioremap_resource()
  perf/arm-cmn: Move overlapping wp_combine field
  ARM: mach-virt: Select PMUv3 driver by default
  ARM: perf: Allow the use of the PMUv3 driver on 32bit ARM
  ARM: Make CONFIG_CPU_V7 valid for 32bit ARMv8 implementations
  perf: pmuv3: Change GENMASK to GENMASK_ULL
  ...
2 parents 1bb31cc + 87727ba commit 9651f00

28 files changed: +909 -466 lines

Documentation/devicetree/bindings/arm/pmu.yaml

Lines changed: 2 additions & 0 deletions
@@ -20,6 +20,8 @@ properties:
     items:
       - enum:
           - apm,potenza-pmu
+          - apple,avalanche-pmu
+          - apple,blizzard-pmu
           - apple,firestorm-pmu
           - apple,icestorm-pmu
           - arm,armv8-pmuv3 # Only for s/w models
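
For orientation, here is a minimal, hypothetical sketch (not part of this commit) of how a perf platform driver binds to DT compatible strings such as the two added above via an OF match table. The driver name, table name, and probe body are illustrative assumptions, not the actual apple_m1 driver code.

#include <linux/mod_devicetable.h>
#include <linux/module.h>
#include <linux/of.h>
#include <linux/platform_device.h>

/* Hypothetical match table: binds the driver to the new DT compatibles. */
static const struct of_device_id example_pmu_of_match[] = {
        { .compatible = "apple,avalanche-pmu" },
        { .compatible = "apple,blizzard-pmu" },
        { /* sentinel */ },
};
MODULE_DEVICE_TABLE(of, example_pmu_of_match);

static int example_pmu_probe(struct platform_device *pdev)
{
        /* A real driver would allocate and register a struct pmu here. */
        return 0;
}

static struct platform_driver example_pmu_driver = {
        .probe  = example_pmu_probe,
        .driver = {
                .name           = "example-apple-pmu",
                .of_match_table = example_pmu_of_match,
        },
};
module_platform_driver(example_pmu_driver);
MODULE_LICENSE("GPL");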

arch/arm/include/asm/arm_pmuv3.h

Lines changed: 247 additions & 0 deletions
@@ -0,0 +1,247 @@
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 2012 ARM Ltd.
 */

#ifndef __ASM_PMUV3_H
#define __ASM_PMUV3_H

#include <asm/cp15.h>
#include <asm/cputype.h>

#define PMCCNTR         __ACCESS_CP15_64(0, c9)

#define PMCR            __ACCESS_CP15(c9, 0, c12, 0)
#define PMCNTENSET      __ACCESS_CP15(c9, 0, c12, 1)
#define PMCNTENCLR      __ACCESS_CP15(c9, 0, c12, 2)
#define PMOVSR          __ACCESS_CP15(c9, 0, c12, 3)
#define PMSELR          __ACCESS_CP15(c9, 0, c12, 5)
#define PMCEID0         __ACCESS_CP15(c9, 0, c12, 6)
#define PMCEID1         __ACCESS_CP15(c9, 0, c12, 7)
#define PMXEVTYPER      __ACCESS_CP15(c9, 0, c13, 1)
#define PMXEVCNTR       __ACCESS_CP15(c9, 0, c13, 2)
#define PMUSERENR       __ACCESS_CP15(c9, 0, c14, 0)
#define PMINTENSET      __ACCESS_CP15(c9, 0, c14, 1)
#define PMINTENCLR      __ACCESS_CP15(c9, 0, c14, 2)
#define PMMIR           __ACCESS_CP15(c9, 0, c14, 6)
#define PMCCFILTR       __ACCESS_CP15(c14, 0, c15, 7)

#define PMEVCNTR0       __ACCESS_CP15(c14, 0, c8, 0)
#define PMEVCNTR1       __ACCESS_CP15(c14, 0, c8, 1)
#define PMEVCNTR2       __ACCESS_CP15(c14, 0, c8, 2)
#define PMEVCNTR3       __ACCESS_CP15(c14, 0, c8, 3)
#define PMEVCNTR4       __ACCESS_CP15(c14, 0, c8, 4)
#define PMEVCNTR5       __ACCESS_CP15(c14, 0, c8, 5)
#define PMEVCNTR6       __ACCESS_CP15(c14, 0, c8, 6)
#define PMEVCNTR7       __ACCESS_CP15(c14, 0, c8, 7)
#define PMEVCNTR8       __ACCESS_CP15(c14, 0, c9, 0)
#define PMEVCNTR9       __ACCESS_CP15(c14, 0, c9, 1)
#define PMEVCNTR10      __ACCESS_CP15(c14, 0, c9, 2)
#define PMEVCNTR11      __ACCESS_CP15(c14, 0, c9, 3)
#define PMEVCNTR12      __ACCESS_CP15(c14, 0, c9, 4)
#define PMEVCNTR13      __ACCESS_CP15(c14, 0, c9, 5)
#define PMEVCNTR14      __ACCESS_CP15(c14, 0, c9, 6)
#define PMEVCNTR15      __ACCESS_CP15(c14, 0, c9, 7)
#define PMEVCNTR16      __ACCESS_CP15(c14, 0, c10, 0)
#define PMEVCNTR17      __ACCESS_CP15(c14, 0, c10, 1)
#define PMEVCNTR18      __ACCESS_CP15(c14, 0, c10, 2)
#define PMEVCNTR19      __ACCESS_CP15(c14, 0, c10, 3)
#define PMEVCNTR20      __ACCESS_CP15(c14, 0, c10, 4)
#define PMEVCNTR21      __ACCESS_CP15(c14, 0, c10, 5)
#define PMEVCNTR22      __ACCESS_CP15(c14, 0, c10, 6)
#define PMEVCNTR23      __ACCESS_CP15(c14, 0, c10, 7)
#define PMEVCNTR24      __ACCESS_CP15(c14, 0, c11, 0)
#define PMEVCNTR25      __ACCESS_CP15(c14, 0, c11, 1)
#define PMEVCNTR26      __ACCESS_CP15(c14, 0, c11, 2)
#define PMEVCNTR27      __ACCESS_CP15(c14, 0, c11, 3)
#define PMEVCNTR28      __ACCESS_CP15(c14, 0, c11, 4)
#define PMEVCNTR29      __ACCESS_CP15(c14, 0, c11, 5)
#define PMEVCNTR30      __ACCESS_CP15(c14, 0, c11, 6)

#define PMEVTYPER0      __ACCESS_CP15(c14, 0, c12, 0)
#define PMEVTYPER1      __ACCESS_CP15(c14, 0, c12, 1)
#define PMEVTYPER2      __ACCESS_CP15(c14, 0, c12, 2)
#define PMEVTYPER3      __ACCESS_CP15(c14, 0, c12, 3)
#define PMEVTYPER4      __ACCESS_CP15(c14, 0, c12, 4)
#define PMEVTYPER5      __ACCESS_CP15(c14, 0, c12, 5)
#define PMEVTYPER6      __ACCESS_CP15(c14, 0, c12, 6)
#define PMEVTYPER7      __ACCESS_CP15(c14, 0, c12, 7)
#define PMEVTYPER8      __ACCESS_CP15(c14, 0, c13, 0)
#define PMEVTYPER9      __ACCESS_CP15(c14, 0, c13, 1)
#define PMEVTYPER10     __ACCESS_CP15(c14, 0, c13, 2)
#define PMEVTYPER11     __ACCESS_CP15(c14, 0, c13, 3)
#define PMEVTYPER12     __ACCESS_CP15(c14, 0, c13, 4)
#define PMEVTYPER13     __ACCESS_CP15(c14, 0, c13, 5)
#define PMEVTYPER14     __ACCESS_CP15(c14, 0, c13, 6)
#define PMEVTYPER15     __ACCESS_CP15(c14, 0, c13, 7)
#define PMEVTYPER16     __ACCESS_CP15(c14, 0, c14, 0)
#define PMEVTYPER17     __ACCESS_CP15(c14, 0, c14, 1)
#define PMEVTYPER18     __ACCESS_CP15(c14, 0, c14, 2)
#define PMEVTYPER19     __ACCESS_CP15(c14, 0, c14, 3)
#define PMEVTYPER20     __ACCESS_CP15(c14, 0, c14, 4)
#define PMEVTYPER21     __ACCESS_CP15(c14, 0, c14, 5)
#define PMEVTYPER22     __ACCESS_CP15(c14, 0, c14, 6)
#define PMEVTYPER23     __ACCESS_CP15(c14, 0, c14, 7)
#define PMEVTYPER24     __ACCESS_CP15(c14, 0, c15, 0)
#define PMEVTYPER25     __ACCESS_CP15(c14, 0, c15, 1)
#define PMEVTYPER26     __ACCESS_CP15(c14, 0, c15, 2)
#define PMEVTYPER27     __ACCESS_CP15(c14, 0, c15, 3)
#define PMEVTYPER28     __ACCESS_CP15(c14, 0, c15, 4)
#define PMEVTYPER29     __ACCESS_CP15(c14, 0, c15, 5)
#define PMEVTYPER30     __ACCESS_CP15(c14, 0, c15, 6)

#define RETURN_READ_PMEVCNTRN(n) \
        return read_sysreg(PMEVCNTR##n)
static unsigned long read_pmevcntrn(int n)
{
        PMEVN_SWITCH(n, RETURN_READ_PMEVCNTRN);
        return 0;
}

#define WRITE_PMEVCNTRN(n) \
        write_sysreg(val, PMEVCNTR##n)
static void write_pmevcntrn(int n, unsigned long val)
{
        PMEVN_SWITCH(n, WRITE_PMEVCNTRN);
}

#define WRITE_PMEVTYPERN(n) \
        write_sysreg(val, PMEVTYPER##n)
static void write_pmevtypern(int n, unsigned long val)
{
        PMEVN_SWITCH(n, WRITE_PMEVTYPERN);
}

static inline unsigned long read_pmmir(void)
{
        return read_sysreg(PMMIR);
}

static inline u32 read_pmuver(void)
{
        /* PMUVers is not a signed field */
        u32 dfr0 = read_cpuid_ext(CPUID_EXT_DFR0);

        return (dfr0 >> 24) & 0xf;
}

static inline void write_pmcr(u32 val)
{
        write_sysreg(val, PMCR);
}

static inline u32 read_pmcr(void)
{
        return read_sysreg(PMCR);
}

static inline void write_pmselr(u32 val)
{
        write_sysreg(val, PMSELR);
}

static inline void write_pmccntr(u64 val)
{
        write_sysreg(val, PMCCNTR);
}

static inline u64 read_pmccntr(void)
{
        return read_sysreg(PMCCNTR);
}

static inline void write_pmxevcntr(u32 val)
{
        write_sysreg(val, PMXEVCNTR);
}

static inline u32 read_pmxevcntr(void)
{
        return read_sysreg(PMXEVCNTR);
}

static inline void write_pmxevtyper(u32 val)
{
        write_sysreg(val, PMXEVTYPER);
}

static inline void write_pmcntenset(u32 val)
{
        write_sysreg(val, PMCNTENSET);
}

static inline void write_pmcntenclr(u32 val)
{
        write_sysreg(val, PMCNTENCLR);
}

static inline void write_pmintenset(u32 val)
{
        write_sysreg(val, PMINTENSET);
}

static inline void write_pmintenclr(u32 val)
{
        write_sysreg(val, PMINTENCLR);
}

static inline void write_pmccfiltr(u32 val)
{
        write_sysreg(val, PMCCFILTR);
}

static inline void write_pmovsclr(u32 val)
{
        write_sysreg(val, PMOVSR);
}

static inline u32 read_pmovsclr(void)
{
        return read_sysreg(PMOVSR);
}

static inline void write_pmuserenr(u32 val)
{
        write_sysreg(val, PMUSERENR);
}

static inline u32 read_pmceid0(void)
{
        return read_sysreg(PMCEID0);
}

static inline u32 read_pmceid1(void)
{
        return read_sysreg(PMCEID1);
}

static inline void kvm_set_pmu_events(u32 set, struct perf_event_attr *attr) {}
static inline void kvm_clr_pmu_events(u32 clr) {}
static inline bool kvm_pmu_counter_deferred(struct perf_event_attr *attr)
{
        return false;
}

/* PMU Version in DFR Register */
#define ARMV8_PMU_DFR_VER_NI        0
#define ARMV8_PMU_DFR_VER_V3P4      0x5
#define ARMV8_PMU_DFR_VER_V3P5      0x6
#define ARMV8_PMU_DFR_VER_IMP_DEF   0xF

static inline bool pmuv3_implemented(int pmuver)
{
        return !(pmuver == ARMV8_PMU_DFR_VER_IMP_DEF ||
                 pmuver == ARMV8_PMU_DFR_VER_NI);
}

static inline bool is_pmuv3p4(int pmuver)
{
        return pmuver >= ARMV8_PMU_DFR_VER_V3P4;
}

static inline bool is_pmuv3p5(int pmuver)
{
        return pmuver >= ARMV8_PMU_DFR_VER_V3P5;
}

#endif
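
As a rough usage sketch (not part of this commit), driver code could drive a single event counter through the accessors above along the following lines. The function name, idx, and evtype are illustrative; PMEVN_SWITCH is assumed to come from the generic PMUv3 headers, and BIT() from <linux/bits.h>.

#include <linux/bits.h>

/* Illustrative only: program, reset and enable event counter `idx`. */
static inline void example_enable_counter(int idx, unsigned long evtype)
{
        if (!pmuv3_implemented(read_pmuver()))
                return;                         /* no architected PMUv3 present */

        write_pmevtypern(idx, evtype);          /* select the event to count */
        write_pmevcntrn(idx, 0);                /* clear the counter value */
        write_pmintenset(BIT(idx));             /* enable its overflow interrupt */
        write_pmcntenset(BIT(idx));             /* enable the counter itself */
}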

arch/arm/mm/Kconfig

Lines changed: 1 addition & 1 deletion
@@ -403,7 +403,7 @@ config CPU_V6K
 	select CPU_THUMB_CAPABLE
 	select CPU_TLB_V6 if MMU
 
-# ARMv7
+# ARMv7 and ARMv8 architectures
 config CPU_V7
 	bool
 	select CPU_32v6K
