
Commit 57ce5d3

xen0n authored and chenhuacai committed
LoongArch: Use the "move" pseudo-instruction where applicable
Some of the assembly code in the LoongArch port likely originated from a time when the assembler did not support pseudo-instructions like "move" or "jr", so the desugared form was used and readability suffers (to a minor degree) as a result. As the upstream toolchain has supported these pseudo-instructions from the beginning, migrate the existing few usages to them for better readability.

Signed-off-by: WANG Xuerui <[email protected]>
Signed-off-by: Huacai Chen <[email protected]>
1 parent 07b4806 commit 57ce5d3
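As a minimal illustration of why this is a pure readability change (not part of the diff; the $a0/$a1 registers below are only placeholders): the LoongArch assembler accepts "move" as a pseudo-instruction that expands to an OR with the zero register, so the old and new spellings assemble to the same machine instruction.

    # desugared form, as previously written in these files
    or      $a0, $a1, $zero    # $a0 = $a1 | 0, i.e. a plain register copy
    # pseudo-instruction form, as used after this commit
    move    $a0, $a1           # same encoding as the "or" above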

File tree

5 files changed: +8 -8 lines changed


arch/loongarch/include/asm/atomic.h

Lines changed: 4 additions & 4 deletions
@@ -157,7 +157,7 @@ static inline int arch_atomic_sub_if_positive(int i, atomic_t *v)
 __asm__ __volatile__(
 "1: ll.w %1, %2 # atomic_sub_if_positive\n"
 " addi.w %0, %1, %3 \n"
-" or %1, %0, $zero \n"
+" move %1, %0 \n"
 " blt %0, $zero, 2f \n"
 " sc.w %1, %2 \n"
 " beq $zero, %1, 1b \n"
@@ -170,7 +170,7 @@ static inline int arch_atomic_sub_if_positive(int i, atomic_t *v)
 __asm__ __volatile__(
 "1: ll.w %1, %2 # atomic_sub_if_positive\n"
 " sub.w %0, %1, %3 \n"
-" or %1, %0, $zero \n"
+" move %1, %0 \n"
 " blt %0, $zero, 2f \n"
 " sc.w %1, %2 \n"
 " beq $zero, %1, 1b \n"
@@ -320,7 +320,7 @@ static inline long arch_atomic64_sub_if_positive(long i, atomic64_t *v)
 __asm__ __volatile__(
 "1: ll.d %1, %2 # atomic64_sub_if_positive \n"
 " addi.d %0, %1, %3 \n"
-" or %1, %0, $zero \n"
+" move %1, %0 \n"
 " blt %0, $zero, 2f \n"
 " sc.d %1, %2 \n"
 " beq %1, $zero, 1b \n"
@@ -333,7 +333,7 @@ static inline long arch_atomic64_sub_if_positive(long i, atomic64_t *v)
 __asm__ __volatile__(
 "1: ll.d %1, %2 # atomic64_sub_if_positive \n"
 " sub.d %0, %1, %3 \n"
-" or %1, %0, $zero \n"
+" move %1, %0 \n"
 " blt %0, $zero, 2f \n"
 " sc.d %1, %2 \n"
 " beq %1, $zero, 1b \n"

arch/loongarch/include/asm/cmpxchg.h

Lines changed: 1 addition & 1 deletion
@@ -55,7 +55,7 @@ static inline unsigned long __xchg(volatile void *ptr, unsigned long x,
 __asm__ __volatile__( \
 "1: " ld " %0, %2 # __cmpxchg_asm \n" \
 " bne %0, %z3, 2f \n" \
-" or $t0, %z4, $zero \n" \
+" move $t0, %z4 \n" \
 " " st " $t0, %1 \n" \
 " beq $zero, $t0, 1b \n" \
 "2: \n" \

arch/loongarch/include/asm/futex.h

Lines changed: 1 addition & 1 deletion
@@ -82,7 +82,7 @@ futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr, u32 oldval, u32 newv
 "# futex_atomic_cmpxchg_inatomic \n"
 "1: ll.w %1, %3 \n"
 " bne %1, %z4, 3f \n"
-" or $t0, %z5, $zero \n"
+" move $t0, %z5 \n"
 "2: sc.w $t0, %2 \n"
 " beq $zero, $t0, 1b \n"
 "3: \n"

arch/loongarch/include/asm/uaccess.h

Lines changed: 1 addition & 1 deletion
@@ -162,7 +162,7 @@ do { \
 "2: \n" \
 " .section .fixup,\"ax\" \n" \
 "3: li.w %0, %3 \n" \
-" or %1, $zero, $zero \n" \
+" move %1, $zero \n" \
 " b 2b \n" \
 " .previous \n" \
 " .section __ex_table,\"a\" \n" \

arch/loongarch/kernel/head.S

Lines changed: 1 addition & 1 deletion
@@ -50,7 +50,7 @@ SYM_CODE_START(kernel_entry) # kernel entry point
 /* KSave3 used for percpu base, initialized as 0 */
 csrwr zero, PERCPU_BASE_KS
 /* GPR21 used for percpu base (runtime), initialized as 0 */
-or u0, zero, zero
+move u0, zero
 
 la tp, init_thread_union
 /* Set the SP after an empty pt_regs. */
