
Commit e96ebd5

danglin44 authored and hdeller committed
parisc: Implement __smp_store_release and __smp_load_acquire barriers
This patch implements the __smp_store_release and __smp_load_acquire barriers using ordered stores and loads. This avoids the sync instruction present in the generic implementation.

Cc: <[email protected]> # 4.14+
Signed-off-by: Dave Anglin <[email protected]>
Signed-off-by: Helge Deller <[email protected]>
1 parent 5b24993 commit e96ebd5
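
For context, the generic fallback that this patch replaces pairs each access with a full memory barrier; on parisc, __smp_mb() expands to the sync instruction the commit message mentions. A rough sketch of those generic versions (paraphrased from include/asm-generic/barrier.h; consult the tree for the exact definitions):

#define __smp_store_release(p, v)                               \
do {                                                            \
        compiletime_assert_atomic_type(*p);                     \
        __smp_mb();     /* full barrier; sync on parisc */      \
        WRITE_ONCE(*p, v);                                      \
} while (0)

#define __smp_load_acquire(p)                                   \
({                                                              \
        typeof(*p) ___p1 = READ_ONCE(*p);                       \
        compiletime_assert_atomic_type(*p);                     \
        __smp_mb();     /* full barrier; sync on parisc */      \
        ___p1;                                                  \
})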

1 file changed: +61 -0 lines changed

arch/parisc/include/asm/barrier.h

Lines changed: 61 additions & 0 deletions
@@ -26,6 +26,67 @@
 #define __smp_rmb()	mb()
 #define __smp_wmb()	mb()
 
+#define __smp_store_release(p, v)                                    \
+do {                                                                 \
+        typeof(p) __p = (p);                                         \
+        union { typeof(*p) __val; char __c[1]; } __u =               \
+                { .__val = (__force typeof(*p)) (v) };               \
+        compiletime_assert_atomic_type(*p);                          \
+        switch (sizeof(*p)) {                                        \
+        case 1:                                                      \
+                asm volatile("stb,ma %0,0(%1)"                       \
+                                : : "r"(*(__u8 *)__u.__c), "r"(__p)  \
+                                : "memory");                         \
+                break;                                               \
+        case 2:                                                      \
+                asm volatile("sth,ma %0,0(%1)"                       \
+                                : : "r"(*(__u16 *)__u.__c), "r"(__p) \
+                                : "memory");                         \
+                break;                                               \
+        case 4:                                                      \
+                asm volatile("stw,ma %0,0(%1)"                       \
+                                : : "r"(*(__u32 *)__u.__c), "r"(__p) \
+                                : "memory");                         \
+                break;                                               \
+        case 8:                                                      \
+                if (IS_ENABLED(CONFIG_64BIT))                        \
+                        asm volatile("std,ma %0,0(%1)"               \
+                                : : "r"(*(__u64 *)__u.__c), "r"(__p) \
+                                : "memory");                         \
+                break;                                               \
+        }                                                            \
+} while (0)
+
+#define __smp_load_acquire(p)                                        \
+({                                                                   \
+        union { typeof(*p) __val; char __c[1]; } __u;                \
+        typeof(p) __p = (p);                                         \
+        compiletime_assert_atomic_type(*p);                          \
+        switch (sizeof(*p)) {                                        \
+        case 1:                                                      \
+                asm volatile("ldb,ma 0(%1),%0"                       \
+                                : "=r"(*(__u8 *)__u.__c) : "r"(__p)  \
+                                : "memory");                         \
+                break;                                               \
+        case 2:                                                      \
+                asm volatile("ldh,ma 0(%1),%0"                       \
+                                : "=r"(*(__u16 *)__u.__c) : "r"(__p) \
+                                : "memory");                         \
+                break;                                               \
+        case 4:                                                      \
+                asm volatile("ldw,ma 0(%1),%0"                       \
+                                : "=r"(*(__u32 *)__u.__c) : "r"(__p) \
+                                : "memory");                         \
+                break;                                               \
+        case 8:                                                      \
+                if (IS_ENABLED(CONFIG_64BIT))                        \
+                        asm volatile("ldd,ma 0(%1),%0"               \
+                                : "=r"(*(__u64 *)__u.__c) : "r"(__p) \
+                                : "memory");                         \
+                break;                                               \
+        }                                                            \
+        __u.__val;                                                   \
+})
 #include <asm-generic/barrier.h>
 
 #endif /* !__ASSEMBLY__ */
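
The stores and loads added above carry the ,ma completer with a zero displacement, which, per the PA-RISC 2.0 ordering semantics the patch relies on, makes each access an ordered one, so no separate sync is needed on either side. For readers unfamiliar with these primitives, a minimal publish/consume sketch of the pattern they implement (hypothetical example code, not part of this commit; the msg/msg_ready names are illustrative):

/* Hypothetical example: a writer publishes a payload behind a flag,
 * and a reader consumes it. */
static struct msg { int payload; } msg;
static int msg_ready;

static void publish(void)
{
        msg.payload = 42;
        /* Release: the payload store cannot be reordered past the
         * flag store, so a reader that sees the flag sees the data. */
        smp_store_release(&msg_ready, 1);
}

static int try_consume(void)
{
        /* Acquire: the flag load is ordered before the payload load. */
        if (smp_load_acquire(&msg_ready))
                return msg.payload;
        return -1;      /* not published yet */
}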
