Skip to content

Commit 23d03f9

Browse files
committed
Provide generic impl Bytes for R: GuestMemoryRegion
This trait implementation of GuestMemoryRegion for `GuestRegionMmap` does not actually make use of the specifics of `GuestRegionMmap`, so it can be completely generic in terms of `GuestMemoryRegion`. This allows us to move it to guest_memory.rs, eliminating one further instance of code depending on exactly one of mmap_unix.rs and mmap_xen.rs being compiled in. However, Paolo pointed out that sometimes this default impl might not be desired; for example, QEMU might want some GuestMemoryRegion impl that represents PCI BARs. So hide this impl behind a marker trait, GuestMemoryRegionBytes, being implemented. Replace .unwrap() with error propagation via `?`, as we no longer know that we are dealing with a specific GuestMemoryRegion impl that always implements as_volatile_slice(). Signed-off-by: Patrick Roy <[email protected]>
1 parent 8de505f commit 23d03f9

File tree

3 files changed

+163
-158
lines changed

3 files changed

+163
-158
lines changed

CHANGELOG.md

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,8 @@
1414
`write_volatile_to` and `write_all_volatile_to` functions from the `GuestMemory` trait to the `Bytes` trait.
1515
- \[[#312](https://github.com/rust-vmm/vm-memory/pull/312)\]: Give `GuestMemory::find_region` a default implementation,
1616
based on linear search.
17+
- \[[#312](https://github.com/rust-vmm/vm-memory/pull/312)\]: Implement `Bytes<MemoryRegionAddress>` generically
18+
for all `R: GuestMemoryRegion`.
1719

1820
### Removed
1921

src/mmap/mod.rs

Lines changed: 5 additions & 152 deletions
Original file line numberDiff line numberDiff line change
@@ -17,14 +17,13 @@ use std::borrow::Borrow;
1717
use std::io::{Seek, SeekFrom};
1818
use std::ops::Deref;
1919
use std::result;
20-
use std::sync::atomic::Ordering;
2120

2221
use crate::address::Address;
2322
use crate::bitmap::{Bitmap, BS};
2423
use crate::guest_memory::{self, FileOffset, GuestAddress, GuestUsize, MemoryRegionAddress};
25-
use crate::region::GuestMemoryRegion;
24+
use crate::region::{GuestMemoryRegion, GuestMemoryRegionBytes};
2625
use crate::volatile_memory::{VolatileMemory, VolatileSlice};
27-
use crate::{AtomicAccess, Bytes, Error, GuestRegionCollection, ReadVolatile, WriteVolatile};
26+
use crate::{Error, GuestRegionCollection};
2827

2928
// re-export for backward compat, as the trait used to be defined in mmap.rs
3029
pub use crate::bitmap::NewBitmap;
@@ -145,154 +144,6 @@ impl<B: NewBitmap> GuestRegionMmap<B> {
145144
}
146145
}
147146

148-
impl<B: Bitmap> Bytes<MemoryRegionAddress> for GuestRegionMmap<B> {
149-
type E = guest_memory::Error;
150-
151-
/// # Examples
152-
/// * Write a slice at guest address 0x1200.
153-
///
154-
/// ```
155-
/// # use vm_memory::{Bytes, GuestAddress, GuestMemoryMmap};
156-
/// #
157-
/// # let start_addr = GuestAddress(0x1000);
158-
/// # let mut gm = GuestMemoryMmap::<()>::from_ranges(&vec![(start_addr, 0x400)])
159-
/// # .expect("Could not create guest memory");
160-
/// #
161-
/// let res = gm
162-
/// .write(&[1, 2, 3, 4, 5], GuestAddress(0x1200))
163-
/// .expect("Could not write to guest memory");
164-
/// assert_eq!(5, res);
165-
/// ```
166-
fn write(&self, buf: &[u8], addr: MemoryRegionAddress) -> guest_memory::Result<usize> {
167-
let maddr = addr.raw_value() as usize;
168-
self.as_volatile_slice()
169-
.unwrap()
170-
.write(buf, maddr)
171-
.map_err(Into::into)
172-
}
173-
174-
/// # Examples
175-
/// * Read a slice of length 16 at guestaddress 0x1200.
176-
///
177-
/// ```
178-
/// # use vm_memory::{Bytes, GuestAddress, GuestMemoryMmap};
179-
/// #
180-
/// # let start_addr = GuestAddress(0x1000);
181-
/// # let mut gm = GuestMemoryMmap::<()>::from_ranges(&vec![(start_addr, 0x400)])
182-
/// # .expect("Could not create guest memory");
183-
/// #
184-
/// let buf = &mut [0u8; 16];
185-
/// let res = gm
186-
/// .read(buf, GuestAddress(0x1200))
187-
/// .expect("Could not read from guest memory");
188-
/// assert_eq!(16, res);
189-
/// ```
190-
fn read(&self, buf: &mut [u8], addr: MemoryRegionAddress) -> guest_memory::Result<usize> {
191-
let maddr = addr.raw_value() as usize;
192-
self.as_volatile_slice()
193-
.unwrap()
194-
.read(buf, maddr)
195-
.map_err(Into::into)
196-
}
197-
198-
fn write_slice(&self, buf: &[u8], addr: MemoryRegionAddress) -> guest_memory::Result<()> {
199-
let maddr = addr.raw_value() as usize;
200-
self.as_volatile_slice()
201-
.unwrap()
202-
.write_slice(buf, maddr)
203-
.map_err(Into::into)
204-
}
205-
206-
fn read_slice(&self, buf: &mut [u8], addr: MemoryRegionAddress) -> guest_memory::Result<()> {
207-
let maddr = addr.raw_value() as usize;
208-
self.as_volatile_slice()
209-
.unwrap()
210-
.read_slice(buf, maddr)
211-
.map_err(Into::into)
212-
}
213-
214-
fn read_volatile_from<F>(
215-
&self,
216-
addr: MemoryRegionAddress,
217-
src: &mut F,
218-
count: usize,
219-
) -> Result<usize, Self::E>
220-
where
221-
F: ReadVolatile,
222-
{
223-
self.as_volatile_slice()
224-
.unwrap()
225-
.read_volatile_from(addr.0 as usize, src, count)
226-
.map_err(Into::into)
227-
}
228-
229-
fn read_exact_volatile_from<F>(
230-
&self,
231-
addr: MemoryRegionAddress,
232-
src: &mut F,
233-
count: usize,
234-
) -> Result<(), Self::E>
235-
where
236-
F: ReadVolatile,
237-
{
238-
self.as_volatile_slice()
239-
.unwrap()
240-
.read_exact_volatile_from(addr.0 as usize, src, count)
241-
.map_err(Into::into)
242-
}
243-
244-
fn write_volatile_to<F>(
245-
&self,
246-
addr: MemoryRegionAddress,
247-
dst: &mut F,
248-
count: usize,
249-
) -> Result<usize, Self::E>
250-
where
251-
F: WriteVolatile,
252-
{
253-
self.as_volatile_slice()
254-
.unwrap()
255-
.write_volatile_to(addr.0 as usize, dst, count)
256-
.map_err(Into::into)
257-
}
258-
259-
fn write_all_volatile_to<F>(
260-
&self,
261-
addr: MemoryRegionAddress,
262-
dst: &mut F,
263-
count: usize,
264-
) -> Result<(), Self::E>
265-
where
266-
F: WriteVolatile,
267-
{
268-
self.as_volatile_slice()
269-
.unwrap()
270-
.write_all_volatile_to(addr.0 as usize, dst, count)
271-
.map_err(Into::into)
272-
}
273-
274-
fn store<T: AtomicAccess>(
275-
&self,
276-
val: T,
277-
addr: MemoryRegionAddress,
278-
order: Ordering,
279-
) -> guest_memory::Result<()> {
280-
self.as_volatile_slice().and_then(|s| {
281-
s.store(val, addr.raw_value() as usize, order)
282-
.map_err(Into::into)
283-
})
284-
}
285-
286-
fn load<T: AtomicAccess>(
287-
&self,
288-
addr: MemoryRegionAddress,
289-
order: Ordering,
290-
) -> guest_memory::Result<T> {
291-
self.as_volatile_slice()
292-
.and_then(|s| s.load(addr.raw_value() as usize, order).map_err(Into::into))
293-
}
294-
}
295-
296147
impl<B: Bitmap> GuestMemoryRegion for GuestRegionMmap<B> {
297148
type B = B;
298149

@@ -339,6 +190,8 @@ impl<B: Bitmap> GuestMemoryRegion for GuestRegionMmap<B> {
339190
}
340191
}
341192

193+
impl<B: Bitmap> GuestMemoryRegionBytes for GuestRegionMmap<B> {}
194+
342195
/// [`GuestMemory`](trait.GuestMemory.html) implementation that mmaps the guest's memory
343196
/// in the current process.
344197
///
@@ -384,7 +237,7 @@ mod tests {
384237

385238
use crate::bitmap::tests::test_guest_memory_and_region;
386239
use crate::bitmap::AtomicBitmap;
387-
use crate::{Error, GuestAddressSpace, GuestMemory, GuestMemoryError};
240+
use crate::{Bytes, Error, GuestAddressSpace, GuestMemory, GuestMemoryError};
388241

389242
use std::io::Write;
390243
use std::mem;

src/region.rs

Lines changed: 156 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,17 +1,17 @@
11
//! Module containing abstracts for dealing with contiguous regions of guest memory
22
33
use crate::bitmap::{Bitmap, BS};
4-
use crate::guest_memory::Error;
54
use crate::guest_memory::Result;
65
use crate::{
7-
Address, Bytes, FileOffset, GuestAddress, GuestMemory, GuestUsize, MemoryRegionAddress,
8-
VolatileSlice,
6+
Address, AtomicAccess, Bytes, FileOffset, GuestAddress, GuestMemory, GuestMemoryError,
7+
GuestUsize, MemoryRegionAddress, ReadVolatile, VolatileSlice, WriteVolatile,
98
};
9+
use std::sync::atomic::Ordering;
1010
use std::sync::Arc;
1111

1212
/// Represents a continuous region of guest physical memory.
1313
#[allow(clippy::len_without_is_empty)]
14-
pub trait GuestMemoryRegion: Bytes<MemoryRegionAddress, E = Error> {
14+
pub trait GuestMemoryRegion: Bytes<MemoryRegionAddress, E = GuestMemoryError> {
1515
/// Type used for dirty memory tracking.
1616
type B: Bitmap;
1717

@@ -73,7 +73,7 @@ pub trait GuestMemoryRegion: Bytes<MemoryRegionAddress, E = Error> {
7373
/// Rust memory safety model. It's the caller's responsibility to ensure that there's no
7474
/// concurrent accesses to the underlying guest memory.
7575
fn get_host_address(&self, _addr: MemoryRegionAddress) -> Result<*mut u8> {
76-
Err(Error::HostAddressNotAvailable)
76+
Err(GuestMemoryError::HostAddressNotAvailable)
7777
}
7878

7979
/// Returns information regarding the file and offset backing this memory region.
@@ -89,7 +89,7 @@ pub trait GuestMemoryRegion: Bytes<MemoryRegionAddress, E = Error> {
8989
offset: MemoryRegionAddress,
9090
count: usize,
9191
) -> Result<VolatileSlice<BS<Self::B>>> {
92-
Err(Error::HostAddressNotAvailable)
92+
Err(GuestMemoryError::HostAddressNotAvailable)
9393
}
9494

9595
/// Gets a slice of memory for the entire region that supports volatile access.
@@ -299,3 +299,153 @@ impl<R: GuestMemoryRegion> GuestMemory for GuestRegionCollection<R> {
299299
self.regions.iter().map(AsRef::as_ref)
300300
}
301301
}
302+
303+
/// A marker trait that if implemented on a type `R` makes available a default
304+
/// implementation of `Bytes<MemoryRegionAddress>` for `R`, based on the assumption
305+
/// that the entire `GuestMemoryRegion` is just traditional memory without any
306+
/// special access requirements.
307+
pub trait GuestMemoryRegionBytes: GuestMemoryRegion {}
308+
309+
impl<R: GuestMemoryRegionBytes> Bytes<MemoryRegionAddress> for R {
310+
type E = GuestMemoryError;
311+
312+
/// # Examples
313+
/// * Write a slice at guest address 0x1200.
314+
///
315+
/// ```
316+
/// # #[cfg(feature = "backend-mmap")]
317+
/// # use vm_memory::{Bytes, GuestAddress, GuestMemoryMmap};
318+
/// #
319+
/// # #[cfg(feature = "backend-mmap")]
320+
/// # {
321+
/// # let start_addr = GuestAddress(0x1000);
322+
/// # let mut gm = GuestMemoryMmap::<()>::from_ranges(&vec![(start_addr, 0x400)])
323+
/// # .expect("Could not create guest memory");
324+
/// #
325+
/// let res = gm
326+
/// .write(&[1, 2, 3, 4, 5], GuestAddress(0x1200))
327+
/// .expect("Could not write to guest memory");
328+
/// assert_eq!(5, res);
329+
/// # }
330+
/// ```
331+
fn write(&self, buf: &[u8], addr: MemoryRegionAddress) -> Result<usize> {
332+
let maddr = addr.raw_value() as usize;
333+
self.as_volatile_slice()?
334+
.write(buf, maddr)
335+
.map_err(Into::into)
336+
}
337+
338+
/// # Examples
339+
/// * Read a slice of length 16 at guestaddress 0x1200.
340+
///
341+
/// ```
342+
/// # #[cfg(feature = "backend-mmap")]
343+
/// # use vm_memory::{Bytes, GuestAddress, GuestMemoryMmap};
344+
/// #
345+
/// # #[cfg(feature = "backend-mmap")]
346+
/// # {
347+
/// # let start_addr = GuestAddress(0x1000);
348+
/// # let mut gm = GuestMemoryMmap::<()>::from_ranges(&vec![(start_addr, 0x400)])
349+
/// # .expect("Could not create guest memory");
350+
/// #
351+
/// let buf = &mut [0u8; 16];
352+
/// let res = gm
353+
/// .read(buf, GuestAddress(0x1200))
354+
/// .expect("Could not read from guest memory");
355+
/// assert_eq!(16, res);
356+
/// # }
357+
/// ```
358+
fn read(&self, buf: &mut [u8], addr: MemoryRegionAddress) -> Result<usize> {
359+
let maddr = addr.raw_value() as usize;
360+
self.as_volatile_slice()?
361+
.read(buf, maddr)
362+
.map_err(Into::into)
363+
}
364+
365+
fn write_slice(&self, buf: &[u8], addr: MemoryRegionAddress) -> Result<()> {
366+
let maddr = addr.raw_value() as usize;
367+
self.as_volatile_slice()?
368+
.write_slice(buf, maddr)
369+
.map_err(Into::into)
370+
}
371+
372+
fn read_slice(&self, buf: &mut [u8], addr: MemoryRegionAddress) -> Result<()> {
373+
let maddr = addr.raw_value() as usize;
374+
self.as_volatile_slice()?
375+
.read_slice(buf, maddr)
376+
.map_err(Into::into)
377+
}
378+
379+
fn read_volatile_from<F>(
380+
&self,
381+
addr: MemoryRegionAddress,
382+
src: &mut F,
383+
count: usize,
384+
) -> Result<usize>
385+
where
386+
F: ReadVolatile,
387+
{
388+
self.as_volatile_slice()?
389+
.read_volatile_from(addr.0 as usize, src, count)
390+
.map_err(Into::into)
391+
}
392+
393+
fn read_exact_volatile_from<F>(
394+
&self,
395+
addr: MemoryRegionAddress,
396+
src: &mut F,
397+
count: usize,
398+
) -> Result<()>
399+
where
400+
F: ReadVolatile,
401+
{
402+
self.as_volatile_slice()?
403+
.read_exact_volatile_from(addr.0 as usize, src, count)
404+
.map_err(Into::into)
405+
}
406+
407+
fn write_volatile_to<F>(
408+
&self,
409+
addr: MemoryRegionAddress,
410+
dst: &mut F,
411+
count: usize,
412+
) -> Result<usize>
413+
where
414+
F: WriteVolatile,
415+
{
416+
self.as_volatile_slice()?
417+
.write_volatile_to(addr.0 as usize, dst, count)
418+
.map_err(Into::into)
419+
}
420+
421+
fn write_all_volatile_to<F>(
422+
&self,
423+
addr: MemoryRegionAddress,
424+
dst: &mut F,
425+
count: usize,
426+
) -> Result<()>
427+
where
428+
F: WriteVolatile,
429+
{
430+
self.as_volatile_slice()?
431+
.write_all_volatile_to(addr.0 as usize, dst, count)
432+
.map_err(Into::into)
433+
}
434+
435+
fn store<T: AtomicAccess>(
436+
&self,
437+
val: T,
438+
addr: MemoryRegionAddress,
439+
order: Ordering,
440+
) -> Result<()> {
441+
self.as_volatile_slice().and_then(|s| {
442+
s.store(val, addr.raw_value() as usize, order)
443+
.map_err(Into::into)
444+
})
445+
}
446+
447+
fn load<T: AtomicAccess>(&self, addr: MemoryRegionAddress, order: Ordering) -> Result<T> {
448+
self.as_volatile_slice()
449+
.and_then(|s| s.load(addr.raw_value() as usize, order).map_err(Into::into))
450+
}
451+
}

0 commit comments

Comments
 (0)