Skip to content

Commit c0357a7

Browse files
broonie authored and ctmarinas committed
arm64/sysreg: Align field names in ID_AA64DFR0_EL1 with architecture
The naming scheme the architecture uses for the fields in ID_AA64DFR0_EL1
does not align well with kernel conventions, using as it does a lot of
MixedCase in various arrangements. In preparation for automatically
generating the defines for this register rename the defines used to match
what is in the architecture.

Signed-off-by: Mark Brown <[email protected]>
Link: https://lore.kernel.org/r/[email protected]
Signed-off-by: Catalin Marinas <[email protected]>
1 parent 3e9ae1c commit c0357a7

File tree

12 files changed

+64
-64
lines changed

12 files changed

+64
-64
lines changed

arch/arm64/include/asm/assembler.h

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -512,7 +512,7 @@ alternative_endif
512512
*/
513513
.macro reset_pmuserenr_el0, tmpreg
514514
mrs \tmpreg, id_aa64dfr0_el1
515-
sbfx \tmpreg, \tmpreg, #ID_AA64DFR0_PMUVER_SHIFT, #4
515+
sbfx \tmpreg, \tmpreg, #ID_AA64DFR0_PMUVer_SHIFT, #4
516516
cmp \tmpreg, #1 // Skip if no PMU present
517517
b.lt 9000f
518518
msr pmuserenr_el0, xzr // Disable PMU access from EL0

arch/arm64/include/asm/cpufeature.h

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -553,7 +553,7 @@ cpuid_feature_cap_perfmon_field(u64 features, int field, u64 cap)
553553
u64 mask = GENMASK_ULL(field + 3, field);
554554

555555
/* Treat IMPLEMENTATION DEFINED functionality as unimplemented */
556-
if (val == ID_AA64DFR0_PMUVER_IMP_DEF)
556+
if (val == ID_AA64DFR0_PMUVer_IMP_DEF)
557557
val = 0;
558558

559559
if (val > cap) {

arch/arm64/include/asm/el2_setup.h

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -40,7 +40,7 @@
4040

4141
.macro __init_el2_debug
4242
mrs x1, id_aa64dfr0_el1
43-
sbfx x0, x1, #ID_AA64DFR0_PMUVER_SHIFT, #4
43+
sbfx x0, x1, #ID_AA64DFR0_PMUVer_SHIFT, #4
4444
cmp x0, #1
4545
b.lt .Lskip_pmu_\@ // Skip if no PMU present
4646
mrs x0, pmcr_el0 // Disable debug access traps
@@ -49,7 +49,7 @@
4949
csel x2, xzr, x0, lt // all PMU counters from EL1
5050

5151
/* Statistical profiling */
52-
ubfx x0, x1, #ID_AA64DFR0_PMSVER_SHIFT, #4
52+
ubfx x0, x1, #ID_AA64DFR0_PMSVer_SHIFT, #4
5353
cbz x0, .Lskip_spe_\@ // Skip if SPE not present
5454

5555
mrs_s x0, SYS_PMBIDR_EL1 // If SPE available at EL2,
@@ -65,7 +65,7 @@
6565

6666
.Lskip_spe_\@:
6767
/* Trace buffer */
68-
ubfx x0, x1, #ID_AA64DFR0_TRBE_SHIFT, #4
68+
ubfx x0, x1, #ID_AA64DFR0_TraceBuffer_SHIFT, #4
6969
cbz x0, .Lskip_trace_\@ // Skip if TraceBuffer is not present
7070

7171
mrs_s x0, SYS_TRBIDR_EL1
@@ -137,7 +137,7 @@
137137

138138
mov x0, xzr
139139
mrs x1, id_aa64dfr0_el1
140-
ubfx x1, x1, #ID_AA64DFR0_PMSVER_SHIFT, #4
140+
ubfx x1, x1, #ID_AA64DFR0_PMSVer_SHIFT, #4
141141
cmp x1, #3
142142
b.lt .Lset_debug_fgt_\@
143143
/* Disable PMSNEVFR_EL1 read and write traps */

arch/arm64/include/asm/hw_breakpoint.h

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -142,7 +142,7 @@ static inline int get_num_brps(void)
142142
u64 dfr0 = read_sanitised_ftr_reg(SYS_ID_AA64DFR0_EL1);
143143
return 1 +
144144
cpuid_feature_extract_unsigned_field(dfr0,
145-
ID_AA64DFR0_BRPS_SHIFT);
145+
ID_AA64DFR0_BRPs_SHIFT);
146146
}
147147

148148
/* Determine number of WRP registers available. */
@@ -151,7 +151,7 @@ static inline int get_num_wrps(void)
151151
u64 dfr0 = read_sanitised_ftr_reg(SYS_ID_AA64DFR0_EL1);
152152
return 1 +
153153
cpuid_feature_extract_unsigned_field(dfr0,
154-
ID_AA64DFR0_WRPS_SHIFT);
154+
ID_AA64DFR0_WRPs_SHIFT);
155155
}
156156

157157
#endif /* __ASM_BREAKPOINT_H */

arch/arm64/include/asm/sysreg.h

Lines changed: 20 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -700,26 +700,26 @@
700700

701701
/* id_aa64dfr0 */
702702
#define ID_AA64DFR0_MTPMU_SHIFT 48
703-
#define ID_AA64DFR0_TRBE_SHIFT 44
704-
#define ID_AA64DFR0_TRACE_FILT_SHIFT 40
705-
#define ID_AA64DFR0_DOUBLELOCK_SHIFT 36
706-
#define ID_AA64DFR0_PMSVER_SHIFT 32
707-
#define ID_AA64DFR0_CTX_CMPS_SHIFT 28
708-
#define ID_AA64DFR0_WRPS_SHIFT 20
709-
#define ID_AA64DFR0_BRPS_SHIFT 12
710-
#define ID_AA64DFR0_PMUVER_SHIFT 8
711-
#define ID_AA64DFR0_TRACEVER_SHIFT 4
712-
#define ID_AA64DFR0_DEBUGVER_SHIFT 0
713-
714-
#define ID_AA64DFR0_PMUVER_8_0 0x1
715-
#define ID_AA64DFR0_PMUVER_8_1 0x4
716-
#define ID_AA64DFR0_PMUVER_8_4 0x5
717-
#define ID_AA64DFR0_PMUVER_8_5 0x6
718-
#define ID_AA64DFR0_PMUVER_8_7 0x7
719-
#define ID_AA64DFR0_PMUVER_IMP_DEF 0xf
720-
721-
#define ID_AA64DFR0_PMSVER_8_2 0x1
722-
#define ID_AA64DFR0_PMSVER_8_3 0x2
703+
#define ID_AA64DFR0_TraceBuffer_SHIFT 44
704+
#define ID_AA64DFR0_TraceFilt_SHIFT 40
705+
#define ID_AA64DFR0_DoubleLock_SHIFT 36
706+
#define ID_AA64DFR0_PMSVer_SHIFT 32
707+
#define ID_AA64DFR0_CTX_CMPs_SHIFT 28
708+
#define ID_AA64DFR0_WRPs_SHIFT 20
709+
#define ID_AA64DFR0_BRPs_SHIFT 12
710+
#define ID_AA64DFR0_PMUVer_SHIFT 8
711+
#define ID_AA64DFR0_TraceVer_SHIFT 4
712+
#define ID_AA64DFR0_DebugVer_SHIFT 0
713+
714+
#define ID_AA64DFR0_PMUVer_8_0 0x1
715+
#define ID_AA64DFR0_PMUVer_8_1 0x4
716+
#define ID_AA64DFR0_PMUVer_8_4 0x5
717+
#define ID_AA64DFR0_PMUVer_8_5 0x6
718+
#define ID_AA64DFR0_PMUVer_8_7 0x7
719+
#define ID_AA64DFR0_PMUVer_IMP_DEF 0xf
720+
721+
#define ID_AA64DFR0_PMSVer_8_2 0x1
722+
#define ID_AA64DFR0_PMSVer_8_3 0x2
723723

724724
#define ID_DFR0_PERFMON_SHIFT 24
725725

arch/arm64/kernel/cpufeature.c

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -434,17 +434,17 @@ static const struct arm64_ftr_bits ftr_id_mmfr0[] = {
434434
};
435435

436436
static const struct arm64_ftr_bits ftr_id_aa64dfr0[] = {
437-
S_ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64DFR0_DOUBLELOCK_SHIFT, 4, 0),
438-
ARM64_FTR_BITS(FTR_HIDDEN, FTR_NONSTRICT, FTR_LOWER_SAFE, ID_AA64DFR0_PMSVER_SHIFT, 4, 0),
439-
ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64DFR0_CTX_CMPS_SHIFT, 4, 0),
440-
ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64DFR0_WRPS_SHIFT, 4, 0),
441-
ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64DFR0_BRPS_SHIFT, 4, 0),
437+
S_ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64DFR0_DoubleLock_SHIFT, 4, 0),
438+
ARM64_FTR_BITS(FTR_HIDDEN, FTR_NONSTRICT, FTR_LOWER_SAFE, ID_AA64DFR0_PMSVer_SHIFT, 4, 0),
439+
ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64DFR0_CTX_CMPs_SHIFT, 4, 0),
440+
ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64DFR0_WRPs_SHIFT, 4, 0),
441+
ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64DFR0_BRPs_SHIFT, 4, 0),
442442
/*
443443
* We can instantiate multiple PMU instances with different levels
444444
* of support.
445445
*/
446-
S_ARM64_FTR_BITS(FTR_HIDDEN, FTR_NONSTRICT, FTR_EXACT, ID_AA64DFR0_PMUVER_SHIFT, 4, 0),
447-
ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_EXACT, ID_AA64DFR0_DEBUGVER_SHIFT, 4, 0x6),
446+
S_ARM64_FTR_BITS(FTR_HIDDEN, FTR_NONSTRICT, FTR_EXACT, ID_AA64DFR0_PMUVer_SHIFT, 4, 0),
447+
ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_EXACT, ID_AA64DFR0_DebugVer_SHIFT, 4, 0x6),
448448
ARM64_FTR_END,
449449
};
450450

arch/arm64/kernel/debug-monitors.c

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,7 @@
2828
u8 debug_monitors_arch(void)
2929
{
3030
return cpuid_feature_extract_unsigned_field(read_sanitised_ftr_reg(SYS_ID_AA64DFR0_EL1),
31-
ID_AA64DFR0_DEBUGVER_SHIFT);
31+
ID_AA64DFR0_DebugVer_SHIFT);
3232
}
3333

3434
/*

arch/arm64/kernel/perf_event.c

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -390,7 +390,7 @@ static const struct attribute_group armv8_pmuv3_caps_attr_group = {
390390
*/
391391
static bool armv8pmu_has_long_event(struct arm_pmu *cpu_pmu)
392392
{
393-
return (cpu_pmu->pmuver >= ID_AA64DFR0_PMUVER_8_5);
393+
return (cpu_pmu->pmuver >= ID_AA64DFR0_PMUVer_8_5);
394394
}
395395

396396
static inline bool armv8pmu_event_has_user_read(struct perf_event *event)
@@ -1145,8 +1145,8 @@ static void __armv8pmu_probe_pmu(void *info)
11451145

11461146
dfr0 = read_sysreg(id_aa64dfr0_el1);
11471147
pmuver = cpuid_feature_extract_unsigned_field(dfr0,
1148-
ID_AA64DFR0_PMUVER_SHIFT);
1149-
if (pmuver == ID_AA64DFR0_PMUVER_IMP_DEF || pmuver == 0)
1148+
ID_AA64DFR0_PMUVer_SHIFT);
1149+
if (pmuver == ID_AA64DFR0_PMUVer_IMP_DEF || pmuver == 0)
11501150
return;
11511151

11521152
cpu_pmu->pmuver = pmuver;
@@ -1172,7 +1172,7 @@ static void __armv8pmu_probe_pmu(void *info)
11721172
pmceid, ARMV8_PMUV3_MAX_COMMON_EVENTS);
11731173

11741174
/* store PMMIR_EL1 register for sysfs */
1175-
if (pmuver >= ID_AA64DFR0_PMUVER_8_4 && (pmceid_raw[1] & BIT(31)))
1175+
if (pmuver >= ID_AA64DFR0_PMUVer_8_4 && (pmceid_raw[1] & BIT(31)))
11761176
cpu_pmu->reg_pmmir = read_cpuid(PMMIR_EL1);
11771177
else
11781178
cpu_pmu->reg_pmmir = 0;

arch/arm64/kvm/debug.c

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -295,12 +295,12 @@ void kvm_arch_vcpu_load_debug_state_flags(struct kvm_vcpu *vcpu)
295295
* If SPE is present on this CPU and is available at current EL,
296296
* we may need to check if the host state needs to be saved.
297297
*/
298-
if (cpuid_feature_extract_unsigned_field(dfr0, ID_AA64DFR0_PMSVER_SHIFT) &&
298+
if (cpuid_feature_extract_unsigned_field(dfr0, ID_AA64DFR0_PMSVer_SHIFT) &&
299299
!(read_sysreg_s(SYS_PMBIDR_EL1) & BIT(SYS_PMBIDR_EL1_P_SHIFT)))
300300
vcpu_set_flag(vcpu, DEBUG_STATE_SAVE_SPE);
301301

302302
/* Check if we have TRBE implemented and available at the host */
303-
if (cpuid_feature_extract_unsigned_field(dfr0, ID_AA64DFR0_TRBE_SHIFT) &&
303+
if (cpuid_feature_extract_unsigned_field(dfr0, ID_AA64DFR0_TraceBuffer_SHIFT) &&
304304
!(read_sysreg_s(SYS_TRBIDR_EL1) & TRBIDR_PROG))
305305
vcpu_set_flag(vcpu, DEBUG_STATE_SAVE_TRBE);
306306
}

arch/arm64/kvm/hyp/nvhe/pkvm.c

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -86,32 +86,32 @@ static void pvm_init_traps_aa64dfr0(struct kvm_vcpu *vcpu)
8686
u64 cptr_set = 0;
8787

8888
/* Trap/constrain PMU */
89-
if (!FIELD_GET(ARM64_FEATURE_MASK(ID_AA64DFR0_PMUVER), feature_ids)) {
89+
if (!FIELD_GET(ARM64_FEATURE_MASK(ID_AA64DFR0_PMUVer), feature_ids)) {
9090
mdcr_set |= MDCR_EL2_TPM | MDCR_EL2_TPMCR;
9191
mdcr_clear |= MDCR_EL2_HPME | MDCR_EL2_MTPME |
9292
MDCR_EL2_HPMN_MASK;
9393
}
9494

9595
/* Trap Debug */
96-
if (!FIELD_GET(ARM64_FEATURE_MASK(ID_AA64DFR0_DEBUGVER), feature_ids))
96+
if (!FIELD_GET(ARM64_FEATURE_MASK(ID_AA64DFR0_DebugVer), feature_ids))
9797
mdcr_set |= MDCR_EL2_TDRA | MDCR_EL2_TDA | MDCR_EL2_TDE;
9898

9999
/* Trap OS Double Lock */
100-
if (!FIELD_GET(ARM64_FEATURE_MASK(ID_AA64DFR0_DOUBLELOCK), feature_ids))
100+
if (!FIELD_GET(ARM64_FEATURE_MASK(ID_AA64DFR0_DoubleLock), feature_ids))
101101
mdcr_set |= MDCR_EL2_TDOSA;
102102

103103
/* Trap SPE */
104-
if (!FIELD_GET(ARM64_FEATURE_MASK(ID_AA64DFR0_PMSVER), feature_ids)) {
104+
if (!FIELD_GET(ARM64_FEATURE_MASK(ID_AA64DFR0_PMSVer), feature_ids)) {
105105
mdcr_set |= MDCR_EL2_TPMS;
106106
mdcr_clear |= MDCR_EL2_E2PB_MASK << MDCR_EL2_E2PB_SHIFT;
107107
}
108108

109109
/* Trap Trace Filter */
110-
if (!FIELD_GET(ARM64_FEATURE_MASK(ID_AA64DFR0_TRACE_FILT), feature_ids))
110+
if (!FIELD_GET(ARM64_FEATURE_MASK(ID_AA64DFR0_TraceFilt), feature_ids))
111111
mdcr_set |= MDCR_EL2_TTRF;
112112

113113
/* Trap Trace */
114-
if (!FIELD_GET(ARM64_FEATURE_MASK(ID_AA64DFR0_TRACEVER), feature_ids))
114+
if (!FIELD_GET(ARM64_FEATURE_MASK(ID_AA64DFR0_TraceVer), feature_ids))
115115
cptr_set |= CPTR_EL2_TTA;
116116

117117
vcpu->arch.mdcr_el2 |= mdcr_set;

0 commit comments

Comments (0)