Commit b620953

Provide generic impl Bytes for R: GuestMemoryRegion
The trait implementation of `Bytes<MemoryRegionAddress>` for `GuestRegionMmap` does not actually make use of the specifics of `GuestRegionMmap`, so it can be made completely generic in terms of `GuestMemoryRegion`. This allows us to move it to region.rs, eliminating one further instance of code that depends on exactly one of mmap_unix.rs and mmap_xen.rs being compiled in.

However, Paolo pointed out that this default impl might not always be desired; for example, QEMU might want a `GuestMemoryRegion` impl that represents PCI BARs. The generic impl is therefore gated behind a marker trait, `GuestMemoryRegionBytes`, which a region type must implement to opt in.

Replace `.unwrap()` with error propagation via `?`, as we no longer know that we are dealing with a specific `GuestMemoryRegion` impl whose `as_volatile_slice()` always succeeds.

Signed-off-by: Patrick Roy <[email protected]>
1 parent 69ffd45 commit b620953
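For readers unfamiliar with the pattern used here: the commit gates a blanket trait implementation behind an empty marker trait, so that region types opt in explicitly. Below is a minimal, self-contained sketch of that pattern; the names (`Region`, `ReadByte`, `RegionBytes`, `PlainRegion`) are illustrative only and are not part of the vm-memory API.

```rust
/// Minimal stand-in for a guest memory region.
trait Region {
    fn as_bytes(&self) -> &[u8];
}

/// Behaviour we want to hand out "for free" to plain-memory regions.
trait ReadByte {
    fn read_byte(&self, offset: usize) -> Option<u8>;
}

/// Empty marker trait: implementing it opts a `Region` into the blanket impl below.
/// A region with special access rules (e.g. one modelling PCI BARs) simply does not
/// implement it and can provide its own `ReadByte` instead.
trait RegionBytes: Region {}

// Blanket impl, available only to regions that opted in via the marker trait.
impl<R: RegionBytes> ReadByte for R {
    fn read_byte(&self, offset: usize) -> Option<u8> {
        self.as_bytes().get(offset).copied()
    }
}

struct PlainRegion(Vec<u8>);

impl Region for PlainRegion {
    fn as_bytes(&self) -> &[u8] {
        &self.0
    }
}

// One empty impl is all it takes to opt in, mirroring
// `impl<B: Bitmap> GuestMemoryRegionBytes for GuestRegionMmap<B> {}` in this commit.
impl RegionBytes for PlainRegion {}

fn main() {
    let region = PlainRegion(vec![0xAA, 0xBB, 0xCC]);
    assert_eq!(region.read_byte(1), Some(0xBB));
}
```

A type with special access semantics (the PCI BAR case mentioned above) would simply not implement the marker trait and would instead provide its own `Bytes` implementation.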

File tree: 3 files changed (+163, -158 lines changed)

- CHANGELOG.md
- src/mmap.rs
- src/region.rs

CHANGELOG.md

Lines changed: 2 additions & 0 deletions
@@ -14,6 +14,8 @@
   `write_volatile_to` and `write_all_volatile_to` functions from the `GuestMemory` trait to the `Bytes` trait.
 - \[[#312](https://github.com/rust-vmm/vm-memory/pull/312)\]: Give `GuestMemory::find_region` a default implementation,
   based on linear search.
+- \[[#312](https://github.com/rust-vmm/vm-memory/pull/312)\]: Implement `Bytes<MemoryRegionAddress>` generically
+  for all `R: GuestMemoryRegion`.
 
 ### Removed
 

src/mmap.rs

Lines changed: 5 additions & 152 deletions
@@ -17,14 +17,13 @@ use std::borrow::Borrow;
 use std::io::{Seek, SeekFrom};
 use std::ops::Deref;
 use std::result;
-use std::sync::atomic::Ordering;
 
 use crate::address::Address;
 use crate::bitmap::{Bitmap, BS};
 use crate::guest_memory::{self, FileOffset, GuestAddress, GuestUsize, MemoryRegionAddress};
-use crate::region::GuestMemoryRegion;
+use crate::region::{GuestMemoryRegion, GuestMemoryRegionBytes};
 use crate::volatile_memory::{VolatileMemory, VolatileSlice};
-use crate::{AtomicAccess, Bytes, Error, GuestRegionCollection, ReadVolatile, WriteVolatile};
+use crate::{Error, GuestRegionCollection};
 
 #[cfg(all(not(feature = "xen"), unix))]
 pub use crate::mmap_unix::{Error as MmapRegionError, MmapRegion, MmapRegionBuilder};
@@ -143,154 +142,6 @@ impl<B: NewBitmap> GuestRegionMmap<B> {
     }
 }
 
-impl<B: Bitmap> Bytes<MemoryRegionAddress> for GuestRegionMmap<B> {
-    type E = guest_memory::Error;
-
-    /// # Examples
-    /// * Write a slice at guest address 0x1200.
-    ///
-    /// ```
-    /// # use vm_memory::{Bytes, GuestAddress, GuestMemoryMmap};
-    /// #
-    /// # let start_addr = GuestAddress(0x1000);
-    /// # let mut gm = GuestMemoryMmap::<()>::from_ranges(&vec![(start_addr, 0x400)])
-    /// #     .expect("Could not create guest memory");
-    /// #
-    /// let res = gm
-    ///     .write(&[1, 2, 3, 4, 5], GuestAddress(0x1200))
-    ///     .expect("Could not write to guest memory");
-    /// assert_eq!(5, res);
-    /// ```
-    fn write(&self, buf: &[u8], addr: MemoryRegionAddress) -> guest_memory::Result<usize> {
-        let maddr = addr.raw_value() as usize;
-        self.as_volatile_slice()
-            .unwrap()
-            .write(buf, maddr)
-            .map_err(Into::into)
-    }
-
-    /// # Examples
-    /// * Read a slice of length 16 at guestaddress 0x1200.
-    ///
-    /// ```
-    /// # use vm_memory::{Bytes, GuestAddress, GuestMemoryMmap};
-    /// #
-    /// # let start_addr = GuestAddress(0x1000);
-    /// # let mut gm = GuestMemoryMmap::<()>::from_ranges(&vec![(start_addr, 0x400)])
-    /// #     .expect("Could not create guest memory");
-    /// #
-    /// let buf = &mut [0u8; 16];
-    /// let res = gm
-    ///     .read(buf, GuestAddress(0x1200))
-    ///     .expect("Could not read from guest memory");
-    /// assert_eq!(16, res);
-    /// ```
-    fn read(&self, buf: &mut [u8], addr: MemoryRegionAddress) -> guest_memory::Result<usize> {
-        let maddr = addr.raw_value() as usize;
-        self.as_volatile_slice()
-            .unwrap()
-            .read(buf, maddr)
-            .map_err(Into::into)
-    }
-
-    fn write_slice(&self, buf: &[u8], addr: MemoryRegionAddress) -> guest_memory::Result<()> {
-        let maddr = addr.raw_value() as usize;
-        self.as_volatile_slice()
-            .unwrap()
-            .write_slice(buf, maddr)
-            .map_err(Into::into)
-    }
-
-    fn read_slice(&self, buf: &mut [u8], addr: MemoryRegionAddress) -> guest_memory::Result<()> {
-        let maddr = addr.raw_value() as usize;
-        self.as_volatile_slice()
-            .unwrap()
-            .read_slice(buf, maddr)
-            .map_err(Into::into)
-    }
-
-    fn read_volatile_from<F>(
-        &self,
-        addr: MemoryRegionAddress,
-        src: &mut F,
-        count: usize,
-    ) -> Result<usize, Self::E>
-    where
-        F: ReadVolatile,
-    {
-        self.as_volatile_slice()
-            .unwrap()
-            .read_volatile_from(addr.0 as usize, src, count)
-            .map_err(Into::into)
-    }
-
-    fn read_exact_volatile_from<F>(
-        &self,
-        addr: MemoryRegionAddress,
-        src: &mut F,
-        count: usize,
-    ) -> Result<(), Self::E>
-    where
-        F: ReadVolatile,
-    {
-        self.as_volatile_slice()
-            .unwrap()
-            .read_exact_volatile_from(addr.0 as usize, src, count)
-            .map_err(Into::into)
-    }
-
-    fn write_volatile_to<F>(
-        &self,
-        addr: MemoryRegionAddress,
-        dst: &mut F,
-        count: usize,
-    ) -> Result<usize, Self::E>
-    where
-        F: WriteVolatile,
-    {
-        self.as_volatile_slice()
-            .unwrap()
-            .write_volatile_to(addr.0 as usize, dst, count)
-            .map_err(Into::into)
-    }
-
-    fn write_all_volatile_to<F>(
-        &self,
-        addr: MemoryRegionAddress,
-        dst: &mut F,
-        count: usize,
-    ) -> Result<(), Self::E>
-    where
-        F: WriteVolatile,
-    {
-        self.as_volatile_slice()
-            .unwrap()
-            .write_all_volatile_to(addr.0 as usize, dst, count)
-            .map_err(Into::into)
-    }
-
-    fn store<T: AtomicAccess>(
-        &self,
-        val: T,
-        addr: MemoryRegionAddress,
-        order: Ordering,
-    ) -> guest_memory::Result<()> {
-        self.as_volatile_slice().and_then(|s| {
-            s.store(val, addr.raw_value() as usize, order)
-                .map_err(Into::into)
-        })
-    }
-
-    fn load<T: AtomicAccess>(
-        &self,
-        addr: MemoryRegionAddress,
-        order: Ordering,
-    ) -> guest_memory::Result<T> {
-        self.as_volatile_slice()
-            .and_then(|s| s.load(addr.raw_value() as usize, order).map_err(Into::into))
-    }
-}
-
 impl<B: Bitmap> GuestMemoryRegion for GuestRegionMmap<B> {
     type B = B;
 
@@ -337,6 +188,8 @@ impl<B: Bitmap> GuestMemoryRegion for GuestRegionMmap<B> {
     }
 }
 
+impl<B: Bitmap> GuestMemoryRegionBytes for GuestRegionMmap<B> {}
+
 /// [`GuestMemory`](trait.GuestMemory.html) implementation that mmaps the guest's memory
 /// in the current process.
 ///
@@ -382,7 +235,7 @@ mod tests {
 
     use crate::bitmap::tests::test_guest_memory_and_region;
    use crate::bitmap::AtomicBitmap;
-    use crate::{Error, GuestAddressSpace, GuestMemory, GuestMemoryError};
+    use crate::{Bytes, Error, GuestAddressSpace, GuestMemory, GuestMemoryError};
 
     use std::io::Write;
     use std::mem;

src/region.rs

Lines changed: 156 additions & 6 deletions
@@ -1,17 +1,17 @@
 //! Module containing abstracts for dealing with contiguous regions of guest memory
 
 use crate::bitmap::{Bitmap, BS};
-use crate::guest_memory::Error;
 use crate::guest_memory::Result;
 use crate::{
-    Address, Bytes, FileOffset, GuestAddress, GuestMemory, GuestUsize, MemoryRegionAddress,
-    VolatileSlice,
+    Address, AtomicAccess, Bytes, FileOffset, GuestAddress, GuestMemory, GuestMemoryError,
+    GuestUsize, MemoryRegionAddress, ReadVolatile, VolatileSlice, WriteVolatile,
 };
+use std::sync::atomic::Ordering;
 use std::sync::Arc;
 
 /// Represents a continuous region of guest physical memory.
 #[allow(clippy::len_without_is_empty)]
-pub trait GuestMemoryRegion: Bytes<MemoryRegionAddress, E = Error> {
+pub trait GuestMemoryRegion: Bytes<MemoryRegionAddress, E = GuestMemoryError> {
     /// Type used for dirty memory tracking.
     type B: Bitmap;
 
@@ -73,7 +73,7 @@ pub trait GuestMemoryRegion: Bytes<MemoryRegionAddress, E = Error> {
     /// Rust memory safety model. It's the caller's responsibility to ensure that there's no
     /// concurrent accesses to the underlying guest memory.
     fn get_host_address(&self, _addr: MemoryRegionAddress) -> Result<*mut u8> {
-        Err(Error::HostAddressNotAvailable)
+        Err(GuestMemoryError::HostAddressNotAvailable)
     }
 
     /// Returns information regarding the file and offset backing this memory region.
@@ -89,7 +89,7 @@
         offset: MemoryRegionAddress,
         count: usize,
     ) -> Result<VolatileSlice<BS<Self::B>>> {
-        Err(Error::HostAddressNotAvailable)
+        Err(GuestMemoryError::HostAddressNotAvailable)
     }
 
     /// Gets a slice of memory for the entire region that supports volatile access.
@@ -299,3 +299,153 @@ impl<R: GuestMemoryRegion> GuestMemory for GuestRegionCollection<R> {
         self.regions.iter().map(AsRef::as_ref)
     }
 }
+
+/// A marker trait that if implemented on a type `R` makes available a default
+/// implementation of `Bytes<MemoryRegionAddress>` for `R`, based on the assumption
+/// that the entire `GuestMemoryRegion` is just traditional memory without any
+/// special access requirements.
+pub trait GuestMemoryRegionBytes: GuestMemoryRegion {}
+
+impl<R: GuestMemoryRegionBytes> Bytes<MemoryRegionAddress> for R {
+    type E = GuestMemoryError;
+
+    /// # Examples
+    /// * Write a slice at guest address 0x1200.
+    ///
+    /// ```
+    /// # #[cfg(feature = "backend-mmap")]
+    /// # use vm_memory::{Bytes, GuestAddress, GuestMemoryMmap};
+    /// #
+    /// # #[cfg(feature = "backend-mmap")]
+    /// # {
+    /// # let start_addr = GuestAddress(0x1000);
+    /// # let mut gm = GuestMemoryMmap::<()>::from_ranges(&vec![(start_addr, 0x400)])
+    /// #     .expect("Could not create guest memory");
+    /// #
+    /// let res = gm
+    ///     .write(&[1, 2, 3, 4, 5], GuestAddress(0x1200))
+    ///     .expect("Could not write to guest memory");
+    /// assert_eq!(5, res);
+    /// # }
+    /// ```
+    fn write(&self, buf: &[u8], addr: MemoryRegionAddress) -> Result<usize> {
+        let maddr = addr.raw_value() as usize;
+        self.as_volatile_slice()?
+            .write(buf, maddr)
+            .map_err(Into::into)
+    }
+
+    /// # Examples
+    /// * Read a slice of length 16 at guestaddress 0x1200.
+    ///
+    /// ```
+    /// # #[cfg(feature = "backend-mmap")]
+    /// # use vm_memory::{Bytes, GuestAddress, GuestMemoryMmap};
+    /// #
+    /// # #[cfg(feature = "backend-mmap")]
+    /// # {
+    /// # let start_addr = GuestAddress(0x1000);
+    /// # let mut gm = GuestMemoryMmap::<()>::from_ranges(&vec![(start_addr, 0x400)])
+    /// #     .expect("Could not create guest memory");
+    /// #
+    /// let buf = &mut [0u8; 16];
+    /// let res = gm
+    ///     .read(buf, GuestAddress(0x1200))
+    ///     .expect("Could not read from guest memory");
+    /// assert_eq!(16, res);
+    /// # }
+    /// ```
+    fn read(&self, buf: &mut [u8], addr: MemoryRegionAddress) -> Result<usize> {
+        let maddr = addr.raw_value() as usize;
+        self.as_volatile_slice()?
+            .read(buf, maddr)
+            .map_err(Into::into)
+    }
+
+    fn write_slice(&self, buf: &[u8], addr: MemoryRegionAddress) -> Result<()> {
+        let maddr = addr.raw_value() as usize;
+        self.as_volatile_slice()?
+            .write_slice(buf, maddr)
+            .map_err(Into::into)
+    }
+
+    fn read_slice(&self, buf: &mut [u8], addr: MemoryRegionAddress) -> Result<()> {
+        let maddr = addr.raw_value() as usize;
+        self.as_volatile_slice()?
+            .read_slice(buf, maddr)
+            .map_err(Into::into)
+    }
+
+    fn read_volatile_from<F>(
+        &self,
+        addr: MemoryRegionAddress,
+        src: &mut F,
+        count: usize,
+    ) -> Result<usize>
+    where
+        F: ReadVolatile,
+    {
+        self.as_volatile_slice()?
+            .read_volatile_from(addr.0 as usize, src, count)
+            .map_err(Into::into)
+    }
+
+    fn read_exact_volatile_from<F>(
+        &self,
+        addr: MemoryRegionAddress,
+        src: &mut F,
+        count: usize,
+    ) -> Result<()>
+    where
+        F: ReadVolatile,
+    {
+        self.as_volatile_slice()?
+            .read_exact_volatile_from(addr.0 as usize, src, count)
+            .map_err(Into::into)
+    }
+
+    fn write_volatile_to<F>(
+        &self,
+        addr: MemoryRegionAddress,
+        dst: &mut F,
+        count: usize,
+    ) -> Result<usize>
+    where
+        F: WriteVolatile,
+    {
+        self.as_volatile_slice()?
+            .write_volatile_to(addr.0 as usize, dst, count)
+            .map_err(Into::into)
+    }
+
+    fn write_all_volatile_to<F>(
+        &self,
+        addr: MemoryRegionAddress,
+        dst: &mut F,
+        count: usize,
+    ) -> Result<()>
+    where
+        F: WriteVolatile,
+    {
+        self.as_volatile_slice()?
+            .write_all_volatile_to(addr.0 as usize, dst, count)
+            .map_err(Into::into)
+    }
+
+    fn store<T: AtomicAccess>(
+        &self,
+        val: T,
+        addr: MemoryRegionAddress,
+        order: Ordering,
+    ) -> Result<()> {
+        self.as_volatile_slice().and_then(|s| {
+            s.store(val, addr.raw_value() as usize, order)
+                .map_err(Into::into)
+        })
+    }
+
+    fn load<T: AtomicAccess>(&self, addr: MemoryRegionAddress, order: Ordering) -> Result<T> {
+        self.as_volatile_slice()
+            .and_then(|s| s.load(addr.raw_value() as usize, order).map_err(Into::into))
+    }
+}
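The `self.as_volatile_slice()?` calls in the blanket impl above sit where `.unwrap()` used to be in the `GuestRegionMmap`-specific code. Below is a minimal standalone sketch of why that matters for a region without a host-visible mapping; the names (`Region`, `read_byte`, `BarRegion`) are illustrative only and are not the vm-memory API.

```rust
/// Stand-in for vm-memory's error type; the first variant mirrors
/// `GuestMemoryError::HostAddressNotAvailable`.
#[derive(Debug)]
enum Error {
    HostAddressNotAvailable,
    OutOfBounds,
}

/// Stand-in for a region: not every implementation can expose a host-visible slice.
trait Region {
    fn as_slice(&self) -> Result<&[u8], Error>;
}

/// Generic accessor, analogous to the blanket `Bytes` methods: it cannot assume
/// `as_slice()` succeeds, so the error is propagated with `?` instead of panicking
/// via `.unwrap()`.
fn read_byte<R: Region>(region: &R, offset: usize) -> Result<u8, Error> {
    let slice = region.as_slice()?; // was: .unwrap(), which would panic here
    slice.get(offset).copied().ok_or(Error::OutOfBounds)
}

/// A region with no host mapping, e.g. something modelling a PCI BAR.
struct BarRegion;

impl Region for BarRegion {
    fn as_slice(&self) -> Result<&[u8], Error> {
        Err(Error::HostAddressNotAvailable)
    }
}

fn main() {
    // The caller sees a recoverable error rather than a panic inside the library.
    assert!(matches!(
        read_byte(&BarRegion, 0),
        Err(Error::HostAddressNotAvailable)
    ));
}
```

With the generic impl in place, a region that cannot provide a volatile slice surfaces `HostAddressNotAvailable` to the caller instead of panicking.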
