Skip to content

Commit 35b483d

Browse files
committed
Provide generic impl Bytes for R: GuestMemoryRegion
This trait implementation of `Bytes<MemoryRegionAddress>` for `GuestRegionMmap` does not actually make use of the specifics of `GuestRegionMmap`, so can be completely generic in terms of `GuestMemoryRegion`. This allows us to move it to guest_memory.rs, eliminating one further instance of code depending on exactly one of mmap_unix.rs and mmap_xen.rs being compiled in. However, Paolo pointed out that sometimes this default impl might not be desired, for example QEMU might want some GuestMemoryRegion impl that represents PCI BARs. So hide this impl behind a marker trait, GuestMemoryRegionBytes, being implemented. Replace .unwrap() with error propagation via `?`, as we no longer know that we are dealing with a specific GuestMemoryRegion impl that always implements as_volatile_slice(). Signed-off-by: Patrick Roy <[email protected]>
1 parent de0cb27 commit 35b483d

File tree

3 files changed

+163
-158
lines changed

3 files changed

+163
-158
lines changed

CHANGELOG.md

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,8 @@
1414
`write_volatile_to` and `write_all_volatile_to` functions from the `GuestMemory` trait to the `Bytes` trait.
1515
- \[[#312](https://github.com/rust-vmm/vm-memory/pull/312)\]: Give `GuestMemory::find_region` a default implementation,
1616
based on linear search.
17+
- \[[#312](https://github.com/rust-vmm/vm-memory/pull/312)\]: Implement `Bytes<MemoryRegionAddress>` generically
18+
for all `R: GuestMemoryRegion`.
1719

1820
- \[[#324](https://github.com/rust-vmm/vm-memory/pull/324)\] `GuestMemoryRegion::bitmap()` now returns a `BitmapSlice`. Accessing the full bitmap is now possible only if the type of the memory region is known, for example with `MmapRegion::bitmap()`.
1921

src/mmap/mod.rs

Lines changed: 5 additions & 152 deletions
Original file line numberDiff line numberDiff line change
@@ -15,14 +15,13 @@
1515
use std::borrow::Borrow;
1616
use std::ops::Deref;
1717
use std::result;
18-
use std::sync::atomic::Ordering;
1918

2019
use crate::address::Address;
2120
use crate::bitmap::{Bitmap, BS};
2221
use crate::guest_memory::{self, FileOffset, GuestAddress, GuestUsize, MemoryRegionAddress};
23-
use crate::region::GuestMemoryRegion;
22+
use crate::region::{GuestMemoryRegion, GuestMemoryRegionBytes};
2423
use crate::volatile_memory::{VolatileMemory, VolatileSlice};
25-
use crate::{AtomicAccess, Bytes, Error, GuestRegionCollection, ReadVolatile, WriteVolatile};
24+
use crate::{Error, GuestRegionCollection};
2625

2726
// re-export for backward compat, as the trait used to be defined in mmap.rs
2827
pub use crate::bitmap::NewBitmap;
@@ -115,154 +114,6 @@ impl<B: NewBitmap> GuestRegionMmap<B> {
115114
}
116115
}
117116

118-
impl<B: Bitmap> Bytes<MemoryRegionAddress> for GuestRegionMmap<B> {
119-
type E = guest_memory::Error;
120-
121-
/// # Examples
122-
/// * Write a slice at guest address 0x1200.
123-
///
124-
/// ```
125-
/// # use vm_memory::{Bytes, GuestAddress, GuestMemoryMmap};
126-
/// #
127-
/// # let start_addr = GuestAddress(0x1000);
128-
/// # let mut gm = GuestMemoryMmap::<()>::from_ranges(&vec![(start_addr, 0x400)])
129-
/// # .expect("Could not create guest memory");
130-
/// #
131-
/// let res = gm
132-
/// .write(&[1, 2, 3, 4, 5], GuestAddress(0x1200))
133-
/// .expect("Could not write to guest memory");
134-
/// assert_eq!(5, res);
135-
/// ```
136-
fn write(&self, buf: &[u8], addr: MemoryRegionAddress) -> guest_memory::Result<usize> {
137-
let maddr = addr.raw_value() as usize;
138-
self.as_volatile_slice()
139-
.unwrap()
140-
.write(buf, maddr)
141-
.map_err(Into::into)
142-
}
143-
144-
/// # Examples
145-
/// * Read a slice of length 16 at guest address 0x1200.
146-
///
147-
/// ```
148-
/// # use vm_memory::{Bytes, GuestAddress, GuestMemoryMmap};
149-
/// #
150-
/// # let start_addr = GuestAddress(0x1000);
151-
/// # let mut gm = GuestMemoryMmap::<()>::from_ranges(&vec![(start_addr, 0x400)])
152-
/// # .expect("Could not create guest memory");
153-
/// #
154-
/// let buf = &mut [0u8; 16];
155-
/// let res = gm
156-
/// .read(buf, GuestAddress(0x1200))
157-
/// .expect("Could not read from guest memory");
158-
/// assert_eq!(16, res);
159-
/// ```
160-
fn read(&self, buf: &mut [u8], addr: MemoryRegionAddress) -> guest_memory::Result<usize> {
161-
let maddr = addr.raw_value() as usize;
162-
self.as_volatile_slice()
163-
.unwrap()
164-
.read(buf, maddr)
165-
.map_err(Into::into)
166-
}
167-
168-
fn write_slice(&self, buf: &[u8], addr: MemoryRegionAddress) -> guest_memory::Result<()> {
169-
let maddr = addr.raw_value() as usize;
170-
self.as_volatile_slice()
171-
.unwrap()
172-
.write_slice(buf, maddr)
173-
.map_err(Into::into)
174-
}
175-
176-
fn read_slice(&self, buf: &mut [u8], addr: MemoryRegionAddress) -> guest_memory::Result<()> {
177-
let maddr = addr.raw_value() as usize;
178-
self.as_volatile_slice()
179-
.unwrap()
180-
.read_slice(buf, maddr)
181-
.map_err(Into::into)
182-
}
183-
184-
fn read_volatile_from<F>(
185-
&self,
186-
addr: MemoryRegionAddress,
187-
src: &mut F,
188-
count: usize,
189-
) -> Result<usize, Self::E>
190-
where
191-
F: ReadVolatile,
192-
{
193-
self.as_volatile_slice()
194-
.unwrap()
195-
.read_volatile_from(addr.0 as usize, src, count)
196-
.map_err(Into::into)
197-
}
198-
199-
fn read_exact_volatile_from<F>(
200-
&self,
201-
addr: MemoryRegionAddress,
202-
src: &mut F,
203-
count: usize,
204-
) -> Result<(), Self::E>
205-
where
206-
F: ReadVolatile,
207-
{
208-
self.as_volatile_slice()
209-
.unwrap()
210-
.read_exact_volatile_from(addr.0 as usize, src, count)
211-
.map_err(Into::into)
212-
}
213-
214-
fn write_volatile_to<F>(
215-
&self,
216-
addr: MemoryRegionAddress,
217-
dst: &mut F,
218-
count: usize,
219-
) -> Result<usize, Self::E>
220-
where
221-
F: WriteVolatile,
222-
{
223-
self.as_volatile_slice()
224-
.unwrap()
225-
.write_volatile_to(addr.0 as usize, dst, count)
226-
.map_err(Into::into)
227-
}
228-
229-
fn write_all_volatile_to<F>(
230-
&self,
231-
addr: MemoryRegionAddress,
232-
dst: &mut F,
233-
count: usize,
234-
) -> Result<(), Self::E>
235-
where
236-
F: WriteVolatile,
237-
{
238-
self.as_volatile_slice()
239-
.unwrap()
240-
.write_all_volatile_to(addr.0 as usize, dst, count)
241-
.map_err(Into::into)
242-
}
243-
244-
fn store<T: AtomicAccess>(
245-
&self,
246-
val: T,
247-
addr: MemoryRegionAddress,
248-
order: Ordering,
249-
) -> guest_memory::Result<()> {
250-
self.as_volatile_slice().and_then(|s| {
251-
s.store(val, addr.raw_value() as usize, order)
252-
.map_err(Into::into)
253-
})
254-
}
255-
256-
fn load<T: AtomicAccess>(
257-
&self,
258-
addr: MemoryRegionAddress,
259-
order: Ordering,
260-
) -> guest_memory::Result<T> {
261-
self.as_volatile_slice()
262-
.and_then(|s| s.load(addr.raw_value() as usize, order).map_err(Into::into))
263-
}
264-
}
265-
266117
impl<B: Bitmap> GuestMemoryRegion for GuestRegionMmap<B> {
267118
type B = B;
268119

@@ -309,6 +160,8 @@ impl<B: Bitmap> GuestMemoryRegion for GuestRegionMmap<B> {
309160
}
310161
}
311162

163+
impl<B: Bitmap> GuestMemoryRegionBytes for GuestRegionMmap<B> {}
164+
312165
/// [`GuestMemory`](trait.GuestMemory.html) implementation that mmaps the guest's memory
313166
/// in the current process.
314167
///
@@ -354,7 +207,7 @@ mod tests {
354207

355208
use crate::bitmap::tests::test_guest_memory_and_region;
356209
use crate::bitmap::AtomicBitmap;
357-
use crate::{Error, GuestAddressSpace, GuestMemory, GuestMemoryError};
210+
use crate::{Bytes, Error, GuestAddressSpace, GuestMemory, GuestMemoryError};
358211

359212
use std::io::Write;
360213
use std::mem;

src/region.rs

Lines changed: 156 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,17 +1,17 @@
11
//! Module containing abstracts for dealing with contiguous regions of guest memory
22
33
use crate::bitmap::{Bitmap, BS};
4-
use crate::guest_memory::Error;
54
use crate::guest_memory::Result;
65
use crate::{
7-
Address, Bytes, FileOffset, GuestAddress, GuestMemory, GuestUsize, MemoryRegionAddress,
8-
VolatileSlice,
6+
Address, AtomicAccess, Bytes, FileOffset, GuestAddress, GuestMemory, GuestMemoryError,
7+
GuestUsize, MemoryRegionAddress, ReadVolatile, VolatileSlice, WriteVolatile,
98
};
9+
use std::sync::atomic::Ordering;
1010
use std::sync::Arc;
1111

1212
/// Represents a continuous region of guest physical memory.
1313
#[allow(clippy::len_without_is_empty)]
14-
pub trait GuestMemoryRegion: Bytes<MemoryRegionAddress, E = Error> {
14+
pub trait GuestMemoryRegion: Bytes<MemoryRegionAddress, E = GuestMemoryError> {
1515
/// Type used for dirty memory tracking.
1616
type B: Bitmap;
1717

@@ -73,7 +73,7 @@ pub trait GuestMemoryRegion: Bytes<MemoryRegionAddress, E = Error> {
7373
/// Rust memory safety model. It's the caller's responsibility to ensure that there's no
7474
/// concurrent accesses to the underlying guest memory.
7575
fn get_host_address(&self, _addr: MemoryRegionAddress) -> Result<*mut u8> {
76-
Err(Error::HostAddressNotAvailable)
76+
Err(GuestMemoryError::HostAddressNotAvailable)
7777
}
7878

7979
/// Returns information regarding the file and offset backing this memory region.
@@ -89,7 +89,7 @@ pub trait GuestMemoryRegion: Bytes<MemoryRegionAddress, E = Error> {
8989
offset: MemoryRegionAddress,
9090
count: usize,
9191
) -> Result<VolatileSlice<BS<Self::B>>> {
92-
Err(Error::HostAddressNotAvailable)
92+
Err(GuestMemoryError::HostAddressNotAvailable)
9393
}
9494

9595
/// Gets a slice of memory for the entire region that supports volatile access.
@@ -299,3 +299,153 @@ impl<R: GuestMemoryRegion> GuestMemory for GuestRegionCollection<R> {
299299
self.regions.iter().map(AsRef::as_ref)
300300
}
301301
}
302+
303+
/// A marker trait that if implemented on a type `R` makes available a default
304+
/// implementation of `Bytes<MemoryRegionAddress>` for `R`, based on the assumption
305+
/// that the entire `GuestMemoryRegion` is just traditional memory without any
306+
/// special access requirements.
307+
pub trait GuestMemoryRegionBytes: GuestMemoryRegion {}
308+
309+
impl<R: GuestMemoryRegionBytes> Bytes<MemoryRegionAddress> for R {
310+
type E = GuestMemoryError;
311+
312+
/// # Examples
313+
/// * Write a slice at guest address 0x1200.
314+
///
315+
/// ```
316+
/// # #[cfg(feature = "backend-mmap")]
317+
/// # use vm_memory::{Bytes, GuestAddress, GuestMemoryMmap};
318+
/// #
319+
/// # #[cfg(feature = "backend-mmap")]
320+
/// # {
321+
/// # let start_addr = GuestAddress(0x1000);
322+
/// # let mut gm = GuestMemoryMmap::<()>::from_ranges(&vec![(start_addr, 0x400)])
323+
/// # .expect("Could not create guest memory");
324+
/// #
325+
/// let res = gm
326+
/// .write(&[1, 2, 3, 4, 5], GuestAddress(0x1200))
327+
/// .expect("Could not write to guest memory");
328+
/// assert_eq!(5, res);
329+
/// # }
330+
/// ```
331+
fn write(&self, buf: &[u8], addr: MemoryRegionAddress) -> Result<usize> {
332+
let maddr = addr.raw_value() as usize;
333+
self.as_volatile_slice()?
334+
.write(buf, maddr)
335+
.map_err(Into::into)
336+
}
337+
338+
/// # Examples
339+
/// * Read a slice of length 16 at guest address 0x1200.
340+
///
341+
/// ```
342+
/// # #[cfg(feature = "backend-mmap")]
343+
/// # use vm_memory::{Bytes, GuestAddress, GuestMemoryMmap};
344+
/// #
345+
/// # #[cfg(feature = "backend-mmap")]
346+
/// # {
347+
/// # let start_addr = GuestAddress(0x1000);
348+
/// # let mut gm = GuestMemoryMmap::<()>::from_ranges(&vec![(start_addr, 0x400)])
349+
/// # .expect("Could not create guest memory");
350+
/// #
351+
/// let buf = &mut [0u8; 16];
352+
/// let res = gm
353+
/// .read(buf, GuestAddress(0x1200))
354+
/// .expect("Could not read from guest memory");
355+
/// assert_eq!(16, res);
356+
/// # }
357+
/// ```
358+
fn read(&self, buf: &mut [u8], addr: MemoryRegionAddress) -> Result<usize> {
359+
let maddr = addr.raw_value() as usize;
360+
self.as_volatile_slice()?
361+
.read(buf, maddr)
362+
.map_err(Into::into)
363+
}
364+
365+
fn write_slice(&self, buf: &[u8], addr: MemoryRegionAddress) -> Result<()> {
366+
let maddr = addr.raw_value() as usize;
367+
self.as_volatile_slice()?
368+
.write_slice(buf, maddr)
369+
.map_err(Into::into)
370+
}
371+
372+
fn read_slice(&self, buf: &mut [u8], addr: MemoryRegionAddress) -> Result<()> {
373+
let maddr = addr.raw_value() as usize;
374+
self.as_volatile_slice()?
375+
.read_slice(buf, maddr)
376+
.map_err(Into::into)
377+
}
378+
379+
fn read_volatile_from<F>(
380+
&self,
381+
addr: MemoryRegionAddress,
382+
src: &mut F,
383+
count: usize,
384+
) -> Result<usize>
385+
where
386+
F: ReadVolatile,
387+
{
388+
self.as_volatile_slice()?
389+
.read_volatile_from(addr.0 as usize, src, count)
390+
.map_err(Into::into)
391+
}
392+
393+
fn read_exact_volatile_from<F>(
394+
&self,
395+
addr: MemoryRegionAddress,
396+
src: &mut F,
397+
count: usize,
398+
) -> Result<()>
399+
where
400+
F: ReadVolatile,
401+
{
402+
self.as_volatile_slice()?
403+
.read_exact_volatile_from(addr.0 as usize, src, count)
404+
.map_err(Into::into)
405+
}
406+
407+
fn write_volatile_to<F>(
408+
&self,
409+
addr: MemoryRegionAddress,
410+
dst: &mut F,
411+
count: usize,
412+
) -> Result<usize>
413+
where
414+
F: WriteVolatile,
415+
{
416+
self.as_volatile_slice()?
417+
.write_volatile_to(addr.0 as usize, dst, count)
418+
.map_err(Into::into)
419+
}
420+
421+
fn write_all_volatile_to<F>(
422+
&self,
423+
addr: MemoryRegionAddress,
424+
dst: &mut F,
425+
count: usize,
426+
) -> Result<()>
427+
where
428+
F: WriteVolatile,
429+
{
430+
self.as_volatile_slice()?
431+
.write_all_volatile_to(addr.0 as usize, dst, count)
432+
.map_err(Into::into)
433+
}
434+
435+
fn store<T: AtomicAccess>(
436+
&self,
437+
val: T,
438+
addr: MemoryRegionAddress,
439+
order: Ordering,
440+
) -> Result<()> {
441+
self.as_volatile_slice().and_then(|s| {
442+
s.store(val, addr.raw_value() as usize, order)
443+
.map_err(Into::into)
444+
})
445+
}
446+
447+
fn load<T: AtomicAccess>(&self, addr: MemoryRegionAddress, order: Ordering) -> Result<T> {
448+
self.as_volatile_slice()
449+
.and_then(|s| s.load(addr.raw_value() as usize, order).map_err(Into::into))
450+
}
451+
}

0 commit comments

Comments
 (0)