Skip to content

Commit d4f31ac

Browse files
ubizjak authored and chenhuacai committed
LoongArch: Simplify _percpu_read() and _percpu_write()
Now the _percpu_read() and _percpu_write() macros call the __percpu_read() and __percpu_write() static inline functions, each of which results in a single assembly instruction. However, the percpu infrastructure expects its leaf definitions to encode the size of their percpu variable, so this patch merges all the asm clauses from the static inline functions into the corresponding leaf macros.

A secondary effect of this change is to avoid explicit __percpu annotations for function arguments. Currently, the __percpu macro is defined in include/linux/compiler_types.h, but with the proposed patch [1] the __percpu definition will need macros from include/asm-generic/percpu.h, creating a forward dependency loop.

The proposed solution is the same as the one the x86 architecture uses.

[1] https://lore.kernel.org/lkml/[email protected]/

Tested-by: Xi Ruoyao <[email protected]>
Signed-off-by: Uros Bizjak <[email protected]>
Signed-off-by: Huacai Chen <[email protected]>
1 parent f93f67d commit d4f31ac

File tree

1 file changed

+35
-89
lines changed

1 file changed

+35
-89
lines changed

arch/loongarch/include/asm/percpu.h

Lines changed: 35 additions & 89 deletions
Original file line numberDiff line numberDiff line change
@@ -68,75 +68,6 @@ PERCPU_OP(and, and, &)
6868
PERCPU_OP(or, or, |)
6969
#undef PERCPU_OP
7070

71-
static __always_inline unsigned long __percpu_read(void __percpu *ptr, int size)
72-
{
73-
unsigned long ret;
74-
75-
switch (size) {
76-
case 1:
77-
__asm__ __volatile__ ("ldx.b %[ret], $r21, %[ptr] \n"
78-
: [ret] "=&r"(ret)
79-
: [ptr] "r"(ptr)
80-
: "memory");
81-
break;
82-
case 2:
83-
__asm__ __volatile__ ("ldx.h %[ret], $r21, %[ptr] \n"
84-
: [ret] "=&r"(ret)
85-
: [ptr] "r"(ptr)
86-
: "memory");
87-
break;
88-
case 4:
89-
__asm__ __volatile__ ("ldx.w %[ret], $r21, %[ptr] \n"
90-
: [ret] "=&r"(ret)
91-
: [ptr] "r"(ptr)
92-
: "memory");
93-
break;
94-
case 8:
95-
__asm__ __volatile__ ("ldx.d %[ret], $r21, %[ptr] \n"
96-
: [ret] "=&r"(ret)
97-
: [ptr] "r"(ptr)
98-
: "memory");
99-
break;
100-
default:
101-
ret = 0;
102-
BUILD_BUG();
103-
}
104-
105-
return ret;
106-
}
107-
108-
static __always_inline void __percpu_write(void __percpu *ptr, unsigned long val, int size)
109-
{
110-
switch (size) {
111-
case 1:
112-
__asm__ __volatile__("stx.b %[val], $r21, %[ptr] \n"
113-
:
114-
: [val] "r" (val), [ptr] "r" (ptr)
115-
: "memory");
116-
break;
117-
case 2:
118-
__asm__ __volatile__("stx.h %[val], $r21, %[ptr] \n"
119-
:
120-
: [val] "r" (val), [ptr] "r" (ptr)
121-
: "memory");
122-
break;
123-
case 4:
124-
__asm__ __volatile__("stx.w %[val], $r21, %[ptr] \n"
125-
:
126-
: [val] "r" (val), [ptr] "r" (ptr)
127-
: "memory");
128-
break;
129-
case 8:
130-
__asm__ __volatile__("stx.d %[val], $r21, %[ptr] \n"
131-
:
132-
: [val] "r" (val), [ptr] "r" (ptr)
133-
: "memory");
134-
break;
135-
default:
136-
BUILD_BUG();
137-
}
138-
}
139-
14071
static __always_inline unsigned long __percpu_xchg(void *ptr, unsigned long val, int size)
14172
{
14273
switch (size) {
@@ -157,6 +88,33 @@ static __always_inline unsigned long __percpu_xchg(void *ptr, unsigned long val,
15788
return 0;
15889
}
15990

91+
#define __pcpu_op_1(op) op ".b "
92+
#define __pcpu_op_2(op) op ".h "
93+
#define __pcpu_op_4(op) op ".w "
94+
#define __pcpu_op_8(op) op ".d "
95+
96+
#define _percpu_read(size, _pcp) \
97+
({ \
98+
typeof(_pcp) __pcp_ret; \
99+
\
100+
__asm__ __volatile__( \
101+
__pcpu_op_##size("ldx") "%[ret], $r21, %[ptr] \n" \
102+
: [ret] "=&r"(__pcp_ret) \
103+
: [ptr] "r"(&(_pcp)) \
104+
: "memory"); \
105+
\
106+
__pcp_ret; \
107+
})
108+
109+
#define _percpu_write(size, _pcp, _val) \
110+
do { \
111+
__asm__ __volatile__( \
112+
__pcpu_op_##size("stx") "%[val], $r21, %[ptr] \n" \
113+
: \
114+
: [val] "r"(_val), [ptr] "r"(&(_pcp)) \
115+
: "memory"); \
116+
} while (0)
117+
160118
/* this_cpu_cmpxchg */
161119
#define _protect_cmpxchg_local(pcp, o, n) \
162120
({ \
@@ -167,18 +125,6 @@ static __always_inline unsigned long __percpu_xchg(void *ptr, unsigned long val,
167125
__ret; \
168126
})
169127

170-
#define _percpu_read(pcp) \
171-
({ \
172-
typeof(pcp) __retval; \
173-
__retval = (typeof(pcp))__percpu_read(&(pcp), sizeof(pcp)); \
174-
__retval; \
175-
})
176-
177-
#define _percpu_write(pcp, val) \
178-
do { \
179-
__percpu_write(&(pcp), (unsigned long)(val), sizeof(pcp)); \
180-
} while (0) \
181-
182128
#define _pcp_protect(operation, pcp, val) \
183129
({ \
184130
typeof(pcp) __retval; \
@@ -215,15 +161,15 @@ do { \
215161
#define this_cpu_or_4(pcp, val) _percpu_or(pcp, val)
216162
#define this_cpu_or_8(pcp, val) _percpu_or(pcp, val)
217163

218-
#define this_cpu_read_1(pcp) _percpu_read(pcp)
219-
#define this_cpu_read_2(pcp) _percpu_read(pcp)
220-
#define this_cpu_read_4(pcp) _percpu_read(pcp)
221-
#define this_cpu_read_8(pcp) _percpu_read(pcp)
164+
#define this_cpu_read_1(pcp) _percpu_read(1, pcp)
165+
#define this_cpu_read_2(pcp) _percpu_read(2, pcp)
166+
#define this_cpu_read_4(pcp) _percpu_read(4, pcp)
167+
#define this_cpu_read_8(pcp) _percpu_read(8, pcp)
222168

223-
#define this_cpu_write_1(pcp, val) _percpu_write(pcp, val)
224-
#define this_cpu_write_2(pcp, val) _percpu_write(pcp, val)
225-
#define this_cpu_write_4(pcp, val) _percpu_write(pcp, val)
226-
#define this_cpu_write_8(pcp, val) _percpu_write(pcp, val)
169+
#define this_cpu_write_1(pcp, val) _percpu_write(1, pcp, val)
170+
#define this_cpu_write_2(pcp, val) _percpu_write(2, pcp, val)
171+
#define this_cpu_write_4(pcp, val) _percpu_write(4, pcp, val)
172+
#define this_cpu_write_8(pcp, val) _percpu_write(8, pcp, val)
227173

228174
#define this_cpu_xchg_1(pcp, val) _percpu_xchg(pcp, val)
229175
#define this_cpu_xchg_2(pcp, val) _percpu_xchg(pcp, val)

0 commit comments

Comments (0)