Commit 90e340c

Move copy_slice() into a submodule
Move the implementation of copy_slice() into a submodule so that we can provide different implementations on different platforms.

Signed-off-by: Liu Jiang <[email protected]>
1 parent 4e3e784 commit 90e340c
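For context on the stated motivation: once copy_slice() sits behind a module boundary, the module body can be selected per target while callers keep using the same `copy_slice_impl::copy_slice` path. A minimal standalone sketch of how that could look, assuming a cfg(target_arch) split (the x86_64/fallback arrangement and the simplified bodies below are illustrative only, not part of this commit):

#[cfg(target_arch = "x86_64")]
mod copy_slice_impl {
    // Hypothetical: an x86_64-tuned copy could live here.
    pub(super) fn copy_slice(dst: &mut [u8], src: &[u8]) -> usize {
        let total = dst.len().min(src.len());
        dst[..total].copy_from_slice(&src[..total]);
        total
    }
}

#[cfg(not(target_arch = "x86_64"))]
mod copy_slice_impl {
    // Portable fallback for every other target.
    pub(super) fn copy_slice(dst: &mut [u8], src: &[u8]) -> usize {
        let total = dst.len().min(src.len());
        dst[..total].copy_from_slice(&src[..total]);
        total
    }
}

fn main() {
    let src = [1u8, 2, 3, 4];
    let mut dst = [0u8; 3];
    // Whichever module was compiled in, callers see the same function path.
    assert_eq!(copy_slice_impl::copy_slice(&mut dst, &src), 3);
    assert_eq!(dst, [1, 2, 3]);
}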

File tree

1 file changed (+57, -51 lines)

src/volatile_memory.rs

Lines changed: 57 additions & 51 deletions
@@ -38,6 +38,8 @@ use std::usize;
 
 use crate::{ByteValued, Bytes};
 
+use copy_slice_impl::copy_slice;
+
 /// `VolatileMemory` related errors.
 #[allow(missing_docs)]
 #[derive(Debug)]
@@ -463,57 +465,6 @@ impl<'a> VolatileSlice<'a> {
     }
 }
 
-// Return the largest value that `addr` is aligned to. Forcing this function to return 1 will
-// cause test_non_atomic_access to fail.
-fn alignment(addr: usize) -> usize {
-    // Rust is silly and does not let me write addr & -addr.
-    addr & (!addr + 1)
-}
-
-// Has the same safety requirements as `read_volatile` + `write_volatile`, namely:
-// - `src_addr` and `dst_addr` must be valid for reads/writes.
-// - `src_addr` and `dst_addr` must be properly aligned with respect to `align`.
-// - `src_addr` must point to a properly initialized value, which is true here because
-//   we're only using integer primitives.
-unsafe fn copy_single(align: usize, src_addr: usize, dst_addr: usize) {
-    match align {
-        8 => write_volatile(dst_addr as *mut u64, read_volatile(src_addr as *const u64)),
-        4 => write_volatile(dst_addr as *mut u32, read_volatile(src_addr as *const u32)),
-        2 => write_volatile(dst_addr as *mut u16, read_volatile(src_addr as *const u16)),
-        1 => write_volatile(dst_addr as *mut u8, read_volatile(src_addr as *const u8)),
-        _ => unreachable!(),
-    }
-}
-
-fn copy_slice(dst: &mut [u8], src: &[u8]) -> usize {
-    let total = min(src.len(), dst.len());
-    let mut left = total;
-
-    let mut src_addr = src.as_ptr() as usize;
-    let mut dst_addr = dst.as_ptr() as usize;
-    let align = min(alignment(src_addr), alignment(dst_addr));
-
-    let mut copy_aligned_slice = |min_align| {
-        while align >= min_align && left >= min_align {
-            // Safe because we check alignment beforehand, the memory areas are valid for
-            // reads/writes, and the source always contains a valid value.
-            unsafe { copy_single(min_align, src_addr, dst_addr) };
-            src_addr += min_align;
-            dst_addr += min_align;
-            left -= min_align;
-        }
-    };
-
-    if size_of::<usize>() > 4 {
-        copy_aligned_slice(8);
-    }
-    copy_aligned_slice(4);
-    copy_aligned_slice(2);
-    copy_aligned_slice(1);
-
-    total
-}
-
 impl Bytes<usize> for VolatileSlice<'_> {
     type E = Error;
 
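As an aside on the helper being moved: alignment() relies on the two's-complement identity that addr & (!addr + 1) isolates the lowest set bit of addr, which is exactly the largest power of two the address is aligned to. A small standalone check of that behaviour (illustrative values only, not crate code):

fn alignment(addr: usize) -> usize {
    // !addr + 1 is the two's-complement negation of addr, so the AND keeps
    // only the lowest set bit: the largest power of two dividing addr.
    addr & (!addr + 1)
}

fn main() {
    assert_eq!(alignment(0x1000), 0x1000); // page-aligned address
    assert_eq!(alignment(12), 4);          // 12 = 0b1100, lowest set bit is 4
    assert_eq!(alignment(7), 1);           // odd addresses are only byte-aligned
}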
@@ -1081,6 +1032,61 @@ impl<'a> From<VolatileSlice<'a>> for VolatileArrayRef<'a, u8> {
     }
 }
 
+// Return the largest value that `addr` is aligned to. Forcing this function to return 1 will
+// cause test_non_atomic_access to fail.
+fn alignment(addr: usize) -> usize {
+    // Rust is silly and does not let me write addr & -addr.
+    addr & (!addr + 1)
+}
+
+mod copy_slice_impl {
+    use super::*;
+
+    // Has the same safety requirements as `read_volatile` + `write_volatile`, namely:
+    // - `src_addr` and `dst_addr` must be valid for reads/writes.
+    // - `src_addr` and `dst_addr` must be properly aligned with respect to `align`.
+    // - `src_addr` must point to a properly initialized value, which is true here because
+    //   we're only using integer primitives.
+    unsafe fn copy_single(align: usize, src_addr: usize, dst_addr: usize) {
+        match align {
+            8 => write_volatile(dst_addr as *mut u64, read_volatile(src_addr as *const u64)),
+            4 => write_volatile(dst_addr as *mut u32, read_volatile(src_addr as *const u32)),
+            2 => write_volatile(dst_addr as *mut u16, read_volatile(src_addr as *const u16)),
+            1 => write_volatile(dst_addr as *mut u8, read_volatile(src_addr as *const u8)),
+            _ => unreachable!(),
+        }
+    }
+
+    pub(super) fn copy_slice(dst: &mut [u8], src: &[u8]) -> usize {
+        let total = min(src.len(), dst.len());
+        let mut left = total;
+
+        let mut src_addr = src.as_ptr() as usize;
+        let mut dst_addr = dst.as_ptr() as usize;
+        let align = min(alignment(src_addr), alignment(dst_addr));
+
+        let mut copy_aligned_slice = |min_align| {
+            while align >= min_align && left >= min_align {
+                // Safe because we check alignment beforehand, the memory areas are valid for
+                // reads/writes, and the source always contains a valid value.
+                unsafe { copy_single(min_align, src_addr, dst_addr) };
+                src_addr += min_align;
+                dst_addr += min_align;
+                left -= min_align;
+            }
+        };
+
+        if size_of::<usize>() > 4 {
+            copy_aligned_slice(8);
+        }
+        copy_aligned_slice(4);
+        copy_aligned_slice(2);
+        copy_aligned_slice(1);
+
+        total
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
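To illustrate what the relocated copy_aligned_slice loop does with a transfer once the common alignment is known, the standalone sketch below (not crate code) reproduces only the chunking arithmetic: on a 64-bit target, 15 bytes between 8-byte-aligned buffers become one u64, one u32, one u16, and one u8 volatile access rather than 15 single-byte accesses.

fn chunk_sizes(align: usize, mut left: usize) -> Vec<usize> {
    let mut chunks = Vec::new();
    // Mirrors the descending passes in copy_slice(); the 8-byte pass is only
    // taken on targets where usize is wider than 4 bytes.
    for size in [8usize, 4, 2, 1] {
        while align >= size && left >= size {
            chunks.push(size);
            left -= size;
        }
    }
    chunks
}

fn main() {
    assert_eq!(chunk_sizes(8, 15), vec![8, 4, 2, 1]);
    assert_eq!(chunk_sizes(2, 6), vec![2, 2, 2]);
    assert_eq!(chunk_sizes(1, 3), vec![1, 1, 1]);
}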
