 #include <asm/barrier.h>
 #include <asm/cmpxchg.h>

-static inline int arch_atomic_read(const atomic_t *v)
+static __always_inline int arch_atomic_read(const atomic_t *v)
 {
 	return __atomic_read(v);
 }
 #define arch_atomic_read arch_atomic_read

-static inline void arch_atomic_set(atomic_t *v, int i)
+static __always_inline void arch_atomic_set(atomic_t *v, int i)
 {
 	__atomic_set(v, i);
 }
 #define arch_atomic_set arch_atomic_set

-static inline int arch_atomic_add_return(int i, atomic_t *v)
+static __always_inline int arch_atomic_add_return(int i, atomic_t *v)
 {
 	return __atomic_add_barrier(i, &v->counter) + i;
 }
 #define arch_atomic_add_return arch_atomic_add_return

-static inline int arch_atomic_fetch_add(int i, atomic_t *v)
+static __always_inline int arch_atomic_fetch_add(int i, atomic_t *v)
 {
 	return __atomic_add_barrier(i, &v->counter);
 }
 #define arch_atomic_fetch_add arch_atomic_fetch_add

-static inline void arch_atomic_add(int i, atomic_t *v)
+static __always_inline void arch_atomic_add(int i, atomic_t *v)
 {
 	__atomic_add(i, &v->counter);
 }
@@ -50,11 +50,11 @@ static inline void arch_atomic_add(int i, atomic_t *v)
 #define arch_atomic_fetch_sub(_i, _v)	arch_atomic_fetch_add(-(int)(_i), _v)

 #define ATOMIC_OPS(op)							\
-static inline void arch_atomic_##op(int i, atomic_t *v)		\
+static __always_inline void arch_atomic_##op(int i, atomic_t *v)	\
 {									\
 	__atomic_##op(i, &v->counter);					\
 }									\
-static inline int arch_atomic_fetch_##op(int i, atomic_t *v)		\
+static __always_inline int arch_atomic_fetch_##op(int i, atomic_t *v)	\
 {									\
 	return __atomic_##op##_barrier(i, &v->counter);			\
 }
@@ -74,60 +74,60 @@ ATOMIC_OPS(xor)

 #define arch_atomic_xchg(v, new)	(arch_xchg(&((v)->counter), new))

-static inline int arch_atomic_cmpxchg(atomic_t *v, int old, int new)
+static __always_inline int arch_atomic_cmpxchg(atomic_t *v, int old, int new)
 {
 	return __atomic_cmpxchg(&v->counter, old, new);
 }
 #define arch_atomic_cmpxchg arch_atomic_cmpxchg

 #define ATOMIC64_INIT(i)  { (i) }

-static inline s64 arch_atomic64_read(const atomic64_t *v)
+static __always_inline s64 arch_atomic64_read(const atomic64_t *v)
 {
 	return __atomic64_read(v);
 }
 #define arch_atomic64_read arch_atomic64_read

-static inline void arch_atomic64_set(atomic64_t *v, s64 i)
+static __always_inline void arch_atomic64_set(atomic64_t *v, s64 i)
 {
 	__atomic64_set(v, i);
 }
 #define arch_atomic64_set arch_atomic64_set

-static inline s64 arch_atomic64_add_return(s64 i, atomic64_t *v)
+static __always_inline s64 arch_atomic64_add_return(s64 i, atomic64_t *v)
 {
 	return __atomic64_add_barrier(i, (long *)&v->counter) + i;
 }
 #define arch_atomic64_add_return arch_atomic64_add_return

-static inline s64 arch_atomic64_fetch_add(s64 i, atomic64_t *v)
+static __always_inline s64 arch_atomic64_fetch_add(s64 i, atomic64_t *v)
 {
 	return __atomic64_add_barrier(i, (long *)&v->counter);
 }
 #define arch_atomic64_fetch_add arch_atomic64_fetch_add

-static inline void arch_atomic64_add(s64 i, atomic64_t *v)
+static __always_inline void arch_atomic64_add(s64 i, atomic64_t *v)
 {
 	__atomic64_add(i, (long *)&v->counter);
 }
 #define arch_atomic64_add arch_atomic64_add

 #define arch_atomic64_xchg(v, new)	(arch_xchg(&((v)->counter), new))

-static inline s64 arch_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
+static __always_inline s64 arch_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
 {
 	return __atomic64_cmpxchg((long *)&v->counter, old, new);
 }
 #define arch_atomic64_cmpxchg arch_atomic64_cmpxchg

-#define ATOMIC64_OPS(op)						\
-static inline void arch_atomic64_##op(s64 i, atomic64_t *v)		\
-{									\
-	__atomic64_##op(i, (long *)&v->counter);			\
-}									\
-static inline long arch_atomic64_fetch_##op(s64 i, atomic64_t *v)	\
-{									\
-	return __atomic64_##op##_barrier(i, (long *)&v->counter);	\
+#define ATOMIC64_OPS(op)						\
+static __always_inline void arch_atomic64_##op(s64 i, atomic64_t *v)	\
+{									\
+	__atomic64_##op(i, (long *)&v->counter);			\
+}									\
+static __always_inline long arch_atomic64_fetch_##op(s64 i, atomic64_t *v)\
+{									\
+	return __atomic64_##op##_barrier(i, (long *)&v->counter);	\
 }

 ATOMIC64_OPS(and)
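The change above is purely about inlining: each 32-bit and 64-bit atomic helper goes from static inline to static __always_inline, so the compiler can no longer leave an out-of-line copy of them. As a rough illustration (a standalone userspace sketch, not the kernel header: demo_add_return, the simplified atomic_t, and the use of __sync_fetch_and_add are hypothetical stand-ins chosen for portability), __always_inline boils down to the GCC/Clang always_inline attribute, and the trailing "+ i" in arch_atomic_add_return is what turns a fetch-and-add primitive into add-and-return semantics:

/*
 * Minimal sketch, compilable with GCC or Clang in userspace.
 * It is NOT the kernel code; names below are stand-ins.
 */
#include <stdio.h>

/*
 * In the kernel, __always_inline comes from the generic compiler headers
 * and is roughly this: the attribute forces inlining even when the
 * optimizer's heuristics would otherwise emit a real function body.
 */
#define __always_inline inline __attribute__((__always_inline__))

typedef struct { int counter; } atomic_t;	/* simplified stand-in */

static __always_inline int demo_add_return(int i, atomic_t *v)
{
	/*
	 * Portable stand-in for __atomic_add_barrier(): the builtin
	 * returns the old value, so adding "+ i" yields the new value,
	 * matching add_return semantics.
	 */
	return __sync_fetch_and_add(&v->counter, i) + i;
}

int main(void)
{
	atomic_t v = { .counter = 40 };

	printf("%d\n", demo_add_return(2, &v));	/* prints 42 */
	return 0;
}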