Skip to content

Commit ea196c5

Browse files
mcd500 authored and palmer-dabbelt committed
riscv: __asm_copy_to-from_user: Fix: Typos in comments
Fixing typos and grammar mistakes, and using a more intuitive label name.

Signed-off-by: Akira Tsukamoto <[email protected]>
Fixes: ca6eaaa ("riscv: __asm_copy_to-from_user: Optimize unaligned memory access and pipeline stall")
Signed-off-by: Palmer Dabbelt <[email protected]>
1 parent d4b3e01 commit ea196c5

File tree

1 file changed

+9
-9
lines changed

1 file changed

+9
-9
lines changed

arch/riscv/lib/uaccess.S

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -33,19 +33,20 @@ ENTRY(__asm_copy_from_user)
3333

3434
/*
3535
* Use byte copy only if too small.
36+
* SZREG holds 4 for RV32 and 8 for RV64
3637
*/
3738
li a3, 9*SZREG /* size must be larger than size in word_copy */
3839
bltu a2, a3, .Lbyte_copy_tail
3940

4041
/*
41-
* Copy first bytes until dst is align to word boundary.
42+
* Copy first bytes until dst is aligned to word boundary.
4243
* a0 - start of dst
4344
* t1 - start of aligned dst
4445
*/
4546
addi t1, a0, SZREG-1
4647
andi t1, t1, ~(SZREG-1)
4748
/* dst is already aligned, skip */
48-
beq a0, t1, .Lskip_first_bytes
49+
beq a0, t1, .Lskip_align_dst
4950
1:
5051
/* a5 - one byte for copying data */
5152
fixup lb a5, 0(a1), 10f
@@ -54,7 +55,7 @@ ENTRY(__asm_copy_from_user)
5455
addi a0, a0, 1 /* dst */
5556
bltu a0, t1, 1b /* t1 - start of aligned dst */
5657

57-
.Lskip_first_bytes:
58+
.Lskip_align_dst:
5859
/*
5960
* Now dst is aligned.
6061
* Use shift-copy if src is misaligned.
@@ -71,7 +72,6 @@ ENTRY(__asm_copy_from_user)
7172
*
7273
* a0 - start of aligned dst
7374
* a1 - start of aligned src
74-
* a3 - a1 & mask:(SZREG-1)
7575
* t0 - end of aligned dst
7676
*/
7777
addi t0, t0, -(8*SZREG) /* not to over run */
@@ -106,7 +106,7 @@ ENTRY(__asm_copy_from_user)
106106
* For misaligned copy we still perform aligned word copy, but
107107
* we need to use the value fetched from the previous iteration and
108108
* do some shifts.
109-
* This is safe because reading less than a word size.
109+
* This is safe because reading is less than a word size.
110110
*
111111
* a0 - start of aligned dst
112112
* a1 - start of src
@@ -116,7 +116,7 @@ ENTRY(__asm_copy_from_user)
116116
*/
117117
/* calculating aligned word boundary for dst */
118118
andi t1, t0, ~(SZREG-1)
119-
/* Converting unaligned src to aligned arc */
119+
/* Converting unaligned src to aligned src */
120120
andi a1, a1, ~(SZREG-1)
121121

122122
/*
@@ -128,7 +128,7 @@ ENTRY(__asm_copy_from_user)
128128
li a5, SZREG*8
129129
sub t4, a5, t3
130130

131-
/* Load the first word to combine with seceond word */
131+
/* Load the first word to combine with second word */
132132
fixup REG_L a5, 0(a1), 10f
133133

134134
3:
@@ -160,15 +160,15 @@ ENTRY(__asm_copy_from_user)
160160
* a1 - start of remaining src
161161
* t0 - end of remaining dst
162162
*/
163-
bgeu a0, t0, 5f
163+
bgeu a0, t0, .Lout_copy_user /* check if end of copy */
164164
4:
165165
fixup lb a5, 0(a1), 10f
166166
addi a1, a1, 1 /* src */
167167
fixup sb a5, 0(a0), 10f
168168
addi a0, a0, 1 /* dst */
169169
bltu a0, t0, 4b /* t0 - end of dst */
170170

171-
5:
171+
.Lout_copy_user:
172172
/* Disable access to user memory */
173173
csrc CSR_STATUS, t6
174174
li a0, 0

0 commit comments

Comments
 (0)