@@ -52,9 +52,9 @@ static __always_inline void
 arch_set_bit(long nr, volatile unsigned long *addr)
 {
 	if (__builtin_constant_p(nr)) {
-		asm volatile(LOCK_PREFIX "orb %1,%0"
+		asm volatile(LOCK_PREFIX "orb %b1,%0"
 			: CONST_MASK_ADDR(nr, addr)
-			: "iq" (CONST_MASK(nr) & 0xff)
+			: "iq" (CONST_MASK(nr))
 			: "memory");
 	} else {
 		asm volatile(LOCK_PREFIX __ASM_SIZE(bts) " %1,%0"
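In the constant-bit path, the `%b` operand modifier asks the compiler to emit the byte form of operand 1 (a low-byte register such as %al, or a byte-sized immediate), so the open-coded `& 0xff` narrowing becomes unnecessary; presumably the point is to let the modifier do the truncation instead of a hand-written mask. A minimal standalone sketch of the idea follows. CONST_MASK here restates the kernel macro (1 << ((nr) & 7)) as an assumption of this sketch, and set_bit_const, the literal `lock` prefix, and the byte-addressing expression are stand-ins for the kernel's LOCK_PREFIX and CONST_MASK_ADDR machinery, not kernel code:

	/* Standalone sketch (x86-64, GCC/Clang): why "orb %b1,%0" lets
	 * the "& 0xff" narrowing go away. */
	#include <stdio.h>

	#define CONST_MASK(nr) (1 << ((nr) & 7))

	static void set_bit_const(long nr, volatile unsigned long *addr)
	{
		/* %b1 emits the byte form of operand 1: the low-byte
		 * register name (%al, %cl, ...) if a register was chosen,
		 * or a byte-sized immediate if the mask folded to a
		 * constant.  A plain %1 could name a wider register,
		 * which the byte-wide "orb" would reject. */
		asm volatile("lock orb %b1,%0"
			     : "+m" (((volatile unsigned char *)addr)[nr >> 3])
			     : "iq" (CONST_MASK(nr))
			     : "memory");
	}

	int main(void)
	{
		unsigned long word = 0;
		set_bit_const(12, &word);	/* sets bit 4 of byte 1 */
		printf("%#lx\n", word);		/* prints 0x1000 */
		return 0;
	}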
@@ -72,9 +72,9 @@ static __always_inline void
 arch_clear_bit(long nr, volatile unsigned long *addr)
 {
 	if (__builtin_constant_p(nr)) {
-		asm volatile(LOCK_PREFIX "andb %1,%0"
+		asm volatile(LOCK_PREFIX "andb %b1,%0"
 			: CONST_MASK_ADDR(nr, addr)
-			: "iq" (CONST_MASK(nr) ^ 0xff));
+			: "iq" (~CONST_MASK(nr)));
 	} else {
 		asm volatile(LOCK_PREFIX __ASM_SIZE(btr) " %1,%0"
 			: : RLONG_ADDR(addr), "Ir" (nr) : "memory");
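The clear_bit hunk swaps the XOR-based byte complement for a plain bitwise NOT. The two agree on the low byte, which is all `andb` consumes once `%b1` narrows the operand, but `~CONST_MASK(nr)` states the intent directly instead of hand-rolling the truncation. A quick standalone check of that equivalence (CONST_MASK again restated from the kernel macro, as an assumption of this sketch):

	#include <assert.h>

	#define CONST_MASK(nr) (1 << ((nr) & 7))

	int main(void)
	{
		/* For every bit position within a byte, the old XOR form
		 * and the new complement agree on the low byte. */
		for (long nr = 0; nr < 8; nr++) {
			unsigned char old_mask = CONST_MASK(nr) ^ 0xff;
			unsigned char new_mask = ~CONST_MASK(nr);
			assert(old_mask == new_mask);
		}
		return 0;
	}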
@@ -123,9 +123,9 @@ static __always_inline void
 arch_change_bit(long nr, volatile unsigned long *addr)
 {
 	if (__builtin_constant_p(nr)) {
-		asm volatile(LOCK_PREFIX "xorb %1,%0"
+		asm volatile(LOCK_PREFIX "xorb %b1,%0"
 			: CONST_MASK_ADDR(nr, addr)
-			: "iq" ((u8)CONST_MASK(nr)));
+			: "iq" (CONST_MASK(nr)));
 	} else {
 		asm volatile(LOCK_PREFIX __ASM_SIZE(btc) " %1,%0"
 			: : RLONG_ADDR(addr), "Ir" (nr) : "memory");
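All three helpers share the same dispatch: when `__builtin_constant_p(nr)` is true, the mask and byte offset fold into a single byte-wide locked or/and/xor on one byte of the bitmap, and otherwise the generic bts/btr/btc form takes the bit number in a register at run time. A reduced standalone version of arch_change_bit's shape follows; the function name, the literal `lock` prefix, the addressing expression, and the `btcq` spelling of `__ASM_SIZE(btc)` are assumptions of this sketch, not kernel code:

	#include <stdio.h>

	#define CONST_MASK(nr) (1 << ((nr) & 7))

	static inline void change_bit_sketch(long nr, volatile unsigned long *addr)
	{
		if (__builtin_constant_p(nr)) {
			/* Compile-time bit number: mask and byte offset
			 * fold into one locked xorb on a single byte. */
			asm volatile("lock xorb %b1,%0"
				     : "+m" (((volatile unsigned char *)addr)[nr >> 3])
				     : "iq" (CONST_MASK(nr))
				     : "memory");
		} else {
			/* Runtime bit number: btc takes the offset in a
			 * register and locates the word itself. */
			asm volatile("lock btcq %1,%0"
				     : "+m" (*addr)
				     : "Ir" ((unsigned long)nr)
				     : "memory");
		}
	}

	int main(void)
	{
		unsigned long bits = 0;
		long nr = 9;

		change_bit_sketch(5, &bits);	/* constant path when inlined */
		change_bit_sketch(nr, &bits);	/* runtime path: btcq */
		printf("%#lx\n", bits);		/* 0x220: bits 5 and 9 set */
		return 0;
	}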