@@ -184,11 +184,11 @@ ENDPROC(memcpy_orig)

#ifndef CONFIG_UML
/*
- * memcpy_mcsafe_unrolled - memory copy with machine check exception handling
+ * __memcpy_mcsafe - memory copy with machine check exception handling
 * Note that we only catch machine checks when reading the source addresses.
 * Writes to target are posted and don't generate machine checks.
 */
-ENTRY(memcpy_mcsafe_unrolled)
+ENTRY(__memcpy_mcsafe)
	cmpl $8, %edx
	/* Less than 8 bytes? Go to byte copy loop */
	jb .L_no_whole_words
@@ -213,49 +213,18 @@ ENTRY(memcpy_mcsafe_unrolled)
	jnz .L_copy_leading_bytes

.L_8byte_aligned:
-	/* Figure out how many whole cache lines (64-bytes) to copy */
-	movl %edx, %ecx
-	andl $63, %edx
-	shrl $6, %ecx
-	jz .L_no_whole_cache_lines
-
-	/* Loop copying whole cache lines */
-.L_cache_w0: movq (%rsi), %r8
-.L_cache_w1: movq 1*8(%rsi), %r9
-.L_cache_w2: movq 2*8(%rsi), %r10
-.L_cache_w3: movq 3*8(%rsi), %r11
-	movq %r8, (%rdi)
-	movq %r9, 1*8(%rdi)
-	movq %r10, 2*8(%rdi)
-	movq %r11, 3*8(%rdi)
-.L_cache_w4: movq 4*8(%rsi), %r8
-.L_cache_w5: movq 5*8(%rsi), %r9
-.L_cache_w6: movq 6*8(%rsi), %r10
-.L_cache_w7: movq 7*8(%rsi), %r11
-	movq %r8, 4*8(%rdi)
-	movq %r9, 5*8(%rdi)
-	movq %r10, 6*8(%rdi)
-	movq %r11, 7*8(%rdi)
-	leaq 64(%rsi), %rsi
-	leaq 64(%rdi), %rdi
-	decl %ecx
-	jnz .L_cache_w0
-
-	/* Are there any trailing 8-byte words? */
-.L_no_whole_cache_lines:
	movl %edx, %ecx
	andl $7, %edx
	shrl $3, %ecx
	jz .L_no_whole_words

-	/* Copy trailing words */
-.L_copy_trailing_words:
+.L_copy_words:
	movq (%rsi), %r8
-	mov %r8, (%rdi)
-	leaq 8(%rsi), %rsi
-	leaq 8(%rdi), %rdi
+	movq %r8, (%rdi)
+	addq $8, %rsi
+	addq $8, %rdi
	decl %ecx
-	jnz .L_copy_trailing_words
+	jnz .L_copy_words

	/* Any trailing bytes? */
.L_no_whole_words:
@@ -276,8 +245,8 @@ ENTRY(memcpy_mcsafe_unrolled)
.L_done_memcpy_trap:
	xorq %rax, %rax
	ret
-ENDPROC(memcpy_mcsafe_unrolled)
-EXPORT_SYMBOL_GPL(memcpy_mcsafe_unrolled)
+ENDPROC(__memcpy_mcsafe)
+EXPORT_SYMBOL_GPL(__memcpy_mcsafe)

.section .fixup, "ax"
	/* Return -EFAULT for any failure */
@@ -288,14 +257,6 @@ EXPORT_SYMBOL_GPL(memcpy_mcsafe_unrolled)
.previous

_ASM_EXTABLE_FAULT(.L_copy_leading_bytes, .L_memcpy_mcsafe_fail)
-_ASM_EXTABLE_FAULT(.L_cache_w0, .L_memcpy_mcsafe_fail)
-_ASM_EXTABLE_FAULT(.L_cache_w1, .L_memcpy_mcsafe_fail)
-_ASM_EXTABLE_FAULT(.L_cache_w2, .L_memcpy_mcsafe_fail)
-_ASM_EXTABLE_FAULT(.L_cache_w3, .L_memcpy_mcsafe_fail)
-_ASM_EXTABLE_FAULT(.L_cache_w4, .L_memcpy_mcsafe_fail)
-_ASM_EXTABLE_FAULT(.L_cache_w5, .L_memcpy_mcsafe_fail)
-_ASM_EXTABLE_FAULT(.L_cache_w6, .L_memcpy_mcsafe_fail)
-_ASM_EXTABLE_FAULT(.L_cache_w7, .L_memcpy_mcsafe_fail)
-_ASM_EXTABLE_FAULT(.L_copy_trailing_words, .L_memcpy_mcsafe_fail)
+_ASM_EXTABLE_FAULT(.L_copy_words, .L_memcpy_mcsafe_fail)
_ASM_EXTABLE_FAULT(.L_copy_trailing_bytes, .L_memcpy_mcsafe_fail)
#endif
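
For readers following the control flow, below is a minimal C model of the simplified routine this patch leaves behind. The function name memcpy_mcsafe_model is hypothetical, and the sketch deliberately omits what makes the real __memcpy_mcsafe special: in the kernel, the _ASM_EXTABLE_FAULT() entries above arrange for a machine check on an annotated source read to land in the .fixup code and return -EFAULT, which plain C cannot express. One visible payoff of dropping the unrolled cache-line loop is that the exception-table list shrinks from eleven entries to three, since only a single word-copy instruction now needs annotation.

/*
 * Hypothetical userspace model of __memcpy_mcsafe after this patch --
 * NOT the kernel implementation. Fault recovery is omitted; this only
 * mirrors the control flow and the 0-on-success return value.
 */
#include <stddef.h>
#include <stdint.h>
#include <string.h>

static int memcpy_mcsafe_model(void *dst, const void *src, size_t len)
{
	const uint8_t *s = src;
	uint8_t *d = dst;
	uint64_t tmp;

	/* .L_copy_leading_bytes: byte copy until the source is 8-byte aligned */
	while (len >= 8 && ((uintptr_t)s & 7)) {
		*d++ = *s++;
		len--;
	}

	/* The new .L_copy_words loop: one 8-byte word per iteration */
	while (len >= 8) {
		memcpy(&tmp, s, 8);	/* movq (%rsi), %r8 -- the annotated read */
		memcpy(d, &tmp, 8);	/* movq %r8, (%rdi) */
		s += 8;			/* addq $8, %rsi */
		d += 8;			/* addq $8, %rdi */
		len -= 8;
	}

	/* .L_copy_trailing_bytes: whatever is left after the word loop */
	while (len--)
		*d++ = *s++;

	return 0;	/* xorq %rax, %rax -- success */
}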