Skip to content

Commit f0778e1

Browse files
committed
Provide generic impl Bytes for R: GuestMemoryRegion
This trait implementation of GuestMemoryRegion for `GuestRegionMmap` does not actually make use of the specifics of `GuestRegionMmap`, so it can be completely generic in terms of `GuestMemoryRegion`. This allows us to move it to guest_memory.rs, eliminating one further instance of code depending on exactly one of mmap_unix.rs and mmap_xen.rs being compiled in. However, Paolo pointed out that sometimes this default impl might not be desired; for example, QEMU might want some GuestMemoryRegion impl that represents PCI BARs. So hide this impl behind a marker trait, GuestMemoryRegionBytes, being implemented. Replace .unwrap() with error propagation via `?`, as we no longer know that we are dealing with a specific GuestMemoryRegion impl that always implements as_volatile_slice(). Signed-off-by: Patrick Roy <[email protected]>
1 parent fb42f50 commit f0778e1

File tree

3 files changed

+163
-158
lines changed

3 files changed

+163
-158
lines changed

CHANGELOG.md

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,8 @@
1414
`write_volatile_to` and `write_all_volatile_to` functions from the `GuestMemory` trait to the `Bytes` trait.
1515
- \[[#312](https://github.com/rust-vmm/vm-memory/pull/312)\]: Give `GuestMemory::find_region` a default implementation,
1616
based on linear search.
17+
- \[[#312](https://github.com/rust-vmm/vm-memory/pull/312)\]: Implement `Bytes<MemoryRegionAddress>` generically
18+
for all `R: GuestMemoryRegion`.
1719

1820
- \[[#324](https://github.com/rust-vmm/vm-memory/pull/324)\] `GuestMemoryRegion::bitmap()` now returns a `BitmapSlice`. Accessing the full bitmap is now possible only if the type of the memory region is known, for example with `MmapRegion::bitmap()`.
1921

src/mmap/mod.rs

Lines changed: 5 additions & 152 deletions
Original file line numberDiff line numberDiff line change
@@ -15,14 +15,13 @@
1515
use std::borrow::Borrow;
1616
use std::ops::Deref;
1717
use std::result;
18-
use std::sync::atomic::Ordering;
1918

2019
use crate::address::Address;
2120
use crate::bitmap::{Bitmap, BS};
2221
use crate::guest_memory::{self, FileOffset, GuestAddress, GuestUsize, MemoryRegionAddress};
23-
use crate::region::GuestMemoryRegion;
22+
use crate::region::{GuestMemoryRegion, GuestMemoryRegionBytes};
2423
use crate::volatile_memory::{VolatileMemory, VolatileSlice};
25-
use crate::{AtomicAccess, Bytes, Error, GuestRegionCollection, ReadVolatile, WriteVolatile};
24+
use crate::{Error, GuestRegionCollection};
2625

2726
// re-export for backward compat, as the trait used to be defined in mmap.rs
2827
pub use crate::bitmap::NewBitmap;
@@ -116,154 +115,6 @@ impl<B: NewBitmap> GuestRegionMmap<B> {
116115
}
117116
}
118117

119-
impl<B: Bitmap> Bytes<MemoryRegionAddress> for GuestRegionMmap<B> {
120-
type E = guest_memory::Error;
121-
122-
/// # Examples
123-
/// * Write a slice at guest address 0x1200.
124-
///
125-
/// ```
126-
/// # use vm_memory::{Bytes, GuestAddress, GuestMemoryMmap};
127-
/// #
128-
/// # let start_addr = GuestAddress(0x1000);
129-
/// # let mut gm = GuestMemoryMmap::<()>::from_ranges(&vec![(start_addr, 0x400)])
130-
/// # .expect("Could not create guest memory");
131-
/// #
132-
/// let res = gm
133-
/// .write(&[1, 2, 3, 4, 5], GuestAddress(0x1200))
134-
/// .expect("Could not write to guest memory");
135-
/// assert_eq!(5, res);
136-
/// ```
137-
fn write(&self, buf: &[u8], addr: MemoryRegionAddress) -> guest_memory::Result<usize> {
138-
let maddr = addr.raw_value() as usize;
139-
self.as_volatile_slice()
140-
.unwrap()
141-
.write(buf, maddr)
142-
.map_err(Into::into)
143-
}
144-
145-
/// # Examples
146-
/// * Read a slice of length 16 at guest address 0x1200.
147-
///
148-
/// ```
149-
/// # use vm_memory::{Bytes, GuestAddress, GuestMemoryMmap};
150-
/// #
151-
/// # let start_addr = GuestAddress(0x1000);
152-
/// # let mut gm = GuestMemoryMmap::<()>::from_ranges(&vec![(start_addr, 0x400)])
153-
/// # .expect("Could not create guest memory");
154-
/// #
155-
/// let buf = &mut [0u8; 16];
156-
/// let res = gm
157-
/// .read(buf, GuestAddress(0x1200))
158-
/// .expect("Could not read from guest memory");
159-
/// assert_eq!(16, res);
160-
/// ```
161-
fn read(&self, buf: &mut [u8], addr: MemoryRegionAddress) -> guest_memory::Result<usize> {
162-
let maddr = addr.raw_value() as usize;
163-
self.as_volatile_slice()
164-
.unwrap()
165-
.read(buf, maddr)
166-
.map_err(Into::into)
167-
}
168-
169-
fn write_slice(&self, buf: &[u8], addr: MemoryRegionAddress) -> guest_memory::Result<()> {
170-
let maddr = addr.raw_value() as usize;
171-
self.as_volatile_slice()
172-
.unwrap()
173-
.write_slice(buf, maddr)
174-
.map_err(Into::into)
175-
}
176-
177-
fn read_slice(&self, buf: &mut [u8], addr: MemoryRegionAddress) -> guest_memory::Result<()> {
178-
let maddr = addr.raw_value() as usize;
179-
self.as_volatile_slice()
180-
.unwrap()
181-
.read_slice(buf, maddr)
182-
.map_err(Into::into)
183-
}
184-
185-
fn read_volatile_from<F>(
186-
&self,
187-
addr: MemoryRegionAddress,
188-
src: &mut F,
189-
count: usize,
190-
) -> Result<usize, Self::E>
191-
where
192-
F: ReadVolatile,
193-
{
194-
self.as_volatile_slice()
195-
.unwrap()
196-
.read_volatile_from(addr.0 as usize, src, count)
197-
.map_err(Into::into)
198-
}
199-
200-
fn read_exact_volatile_from<F>(
201-
&self,
202-
addr: MemoryRegionAddress,
203-
src: &mut F,
204-
count: usize,
205-
) -> Result<(), Self::E>
206-
where
207-
F: ReadVolatile,
208-
{
209-
self.as_volatile_slice()
210-
.unwrap()
211-
.read_exact_volatile_from(addr.0 as usize, src, count)
212-
.map_err(Into::into)
213-
}
214-
215-
fn write_volatile_to<F>(
216-
&self,
217-
addr: MemoryRegionAddress,
218-
dst: &mut F,
219-
count: usize,
220-
) -> Result<usize, Self::E>
221-
where
222-
F: WriteVolatile,
223-
{
224-
self.as_volatile_slice()
225-
.unwrap()
226-
.write_volatile_to(addr.0 as usize, dst, count)
227-
.map_err(Into::into)
228-
}
229-
230-
fn write_all_volatile_to<F>(
231-
&self,
232-
addr: MemoryRegionAddress,
233-
dst: &mut F,
234-
count: usize,
235-
) -> Result<(), Self::E>
236-
where
237-
F: WriteVolatile,
238-
{
239-
self.as_volatile_slice()
240-
.unwrap()
241-
.write_all_volatile_to(addr.0 as usize, dst, count)
242-
.map_err(Into::into)
243-
}
244-
245-
fn store<T: AtomicAccess>(
246-
&self,
247-
val: T,
248-
addr: MemoryRegionAddress,
249-
order: Ordering,
250-
) -> guest_memory::Result<()> {
251-
self.as_volatile_slice().and_then(|s| {
252-
s.store(val, addr.raw_value() as usize, order)
253-
.map_err(Into::into)
254-
})
255-
}
256-
257-
fn load<T: AtomicAccess>(
258-
&self,
259-
addr: MemoryRegionAddress,
260-
order: Ordering,
261-
) -> guest_memory::Result<T> {
262-
self.as_volatile_slice()
263-
.and_then(|s| s.load(addr.raw_value() as usize, order).map_err(Into::into))
264-
}
265-
}
266-
267118
impl<B: Bitmap> GuestMemoryRegion for GuestRegionMmap<B> {
268119
type B = B;
269120

@@ -310,6 +161,8 @@ impl<B: Bitmap> GuestMemoryRegion for GuestRegionMmap<B> {
310161
}
311162
}
312163

164+
impl<B: Bitmap> GuestMemoryRegionBytes for GuestRegionMmap<B> {}
165+
313166
/// [`GuestMemory`](trait.GuestMemory.html) implementation that mmaps the guest's memory
314167
/// in the current process.
315168
///
@@ -355,7 +208,7 @@ mod tests {
355208

356209
use crate::bitmap::tests::test_guest_memory_and_region;
357210
use crate::bitmap::AtomicBitmap;
358-
use crate::{Error, GuestAddressSpace, GuestMemory, GuestMemoryError};
211+
use crate::{Bytes, Error, GuestAddressSpace, GuestMemory, GuestMemoryError};
359212

360213
use std::io::Write;
361214
use std::mem;

src/region.rs

Lines changed: 156 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,17 +1,17 @@
11
//! Module containing abstracts for dealing with contiguous regions of guest memory
22
33
use crate::bitmap::{Bitmap, BS};
4-
use crate::guest_memory::Error;
54
use crate::guest_memory::Result;
65
use crate::{
7-
Address, Bytes, FileOffset, GuestAddress, GuestMemory, GuestUsize, MemoryRegionAddress,
8-
VolatileSlice,
6+
Address, AtomicAccess, Bytes, FileOffset, GuestAddress, GuestMemory, GuestMemoryError,
7+
GuestUsize, MemoryRegionAddress, ReadVolatile, VolatileSlice, WriteVolatile,
98
};
9+
use std::sync::atomic::Ordering;
1010
use std::sync::Arc;
1111

1212
/// Represents a continuous region of guest physical memory.
1313
#[allow(clippy::len_without_is_empty)]
14-
pub trait GuestMemoryRegion: Bytes<MemoryRegionAddress, E = Error> {
14+
pub trait GuestMemoryRegion: Bytes<MemoryRegionAddress, E = GuestMemoryError> {
1515
/// Type used for dirty memory tracking.
1616
type B: Bitmap;
1717

@@ -73,7 +73,7 @@ pub trait GuestMemoryRegion: Bytes<MemoryRegionAddress, E = Error> {
7373
/// Rust memory safety model. It's the caller's responsibility to ensure that there's no
7474
/// concurrent accesses to the underlying guest memory.
7575
fn get_host_address(&self, _addr: MemoryRegionAddress) -> Result<*mut u8> {
76-
Err(Error::HostAddressNotAvailable)
76+
Err(GuestMemoryError::HostAddressNotAvailable)
7777
}
7878

7979
/// Returns information regarding the file and offset backing this memory region.
@@ -89,7 +89,7 @@ pub trait GuestMemoryRegion: Bytes<MemoryRegionAddress, E = Error> {
8989
offset: MemoryRegionAddress,
9090
count: usize,
9191
) -> Result<VolatileSlice<BS<Self::B>>> {
92-
Err(Error::HostAddressNotAvailable)
92+
Err(GuestMemoryError::HostAddressNotAvailable)
9393
}
9494

9595
/// Gets a slice of memory for the entire region that supports volatile access.
@@ -299,3 +299,153 @@ impl<R: GuestMemoryRegion> GuestMemory for GuestRegionCollection<R> {
299299
self.regions.iter().map(AsRef::as_ref)
300300
}
301301
}
302+
303+
/// A marker trait that if implemented on a type `R` makes available a default
304+
/// implementation of `Bytes<MemoryRegionAddress>` for `R`, based on the assumption
305+
/// that the entire `GuestMemoryRegion` is just traditional memory without any
306+
/// special access requirements.
307+
pub trait GuestMemoryRegionBytes: GuestMemoryRegion {}
308+
309+
impl<R: GuestMemoryRegionBytes> Bytes<MemoryRegionAddress> for R {
310+
type E = GuestMemoryError;
311+
312+
/// # Examples
313+
/// * Write a slice at guest address 0x1200.
314+
///
315+
/// ```
316+
/// # #[cfg(feature = "backend-mmap")]
317+
/// # use vm_memory::{Bytes, GuestAddress, GuestMemoryMmap};
318+
/// #
319+
/// # #[cfg(feature = "backend-mmap")]
320+
/// # {
321+
/// # let start_addr = GuestAddress(0x1000);
322+
/// # let mut gm = GuestMemoryMmap::<()>::from_ranges(&vec![(start_addr, 0x400)])
323+
/// # .expect("Could not create guest memory");
324+
/// #
325+
/// let res = gm
326+
/// .write(&[1, 2, 3, 4, 5], GuestAddress(0x1200))
327+
/// .expect("Could not write to guest memory");
328+
/// assert_eq!(5, res);
329+
/// # }
330+
/// ```
331+
fn write(&self, buf: &[u8], addr: MemoryRegionAddress) -> Result<usize> {
332+
let maddr = addr.raw_value() as usize;
333+
self.as_volatile_slice()?
334+
.write(buf, maddr)
335+
.map_err(Into::into)
336+
}
337+
338+
/// # Examples
339+
/// * Read a slice of length 16 at guest address 0x1200.
340+
///
341+
/// ```
342+
/// # #[cfg(feature = "backend-mmap")]
343+
/// # use vm_memory::{Bytes, GuestAddress, GuestMemoryMmap};
344+
/// #
345+
/// # #[cfg(feature = "backend-mmap")]
346+
/// # {
347+
/// # let start_addr = GuestAddress(0x1000);
348+
/// # let mut gm = GuestMemoryMmap::<()>::from_ranges(&vec![(start_addr, 0x400)])
349+
/// # .expect("Could not create guest memory");
350+
/// #
351+
/// let buf = &mut [0u8; 16];
352+
/// let res = gm
353+
/// .read(buf, GuestAddress(0x1200))
354+
/// .expect("Could not read from guest memory");
355+
/// assert_eq!(16, res);
356+
/// # }
357+
/// ```
358+
fn read(&self, buf: &mut [u8], addr: MemoryRegionAddress) -> Result<usize> {
359+
let maddr = addr.raw_value() as usize;
360+
self.as_volatile_slice()?
361+
.read(buf, maddr)
362+
.map_err(Into::into)
363+
}
364+
365+
fn write_slice(&self, buf: &[u8], addr: MemoryRegionAddress) -> Result<()> {
366+
let maddr = addr.raw_value() as usize;
367+
self.as_volatile_slice()?
368+
.write_slice(buf, maddr)
369+
.map_err(Into::into)
370+
}
371+
372+
fn read_slice(&self, buf: &mut [u8], addr: MemoryRegionAddress) -> Result<()> {
373+
let maddr = addr.raw_value() as usize;
374+
self.as_volatile_slice()?
375+
.read_slice(buf, maddr)
376+
.map_err(Into::into)
377+
}
378+
379+
fn read_volatile_from<F>(
380+
&self,
381+
addr: MemoryRegionAddress,
382+
src: &mut F,
383+
count: usize,
384+
) -> Result<usize>
385+
where
386+
F: ReadVolatile,
387+
{
388+
self.as_volatile_slice()?
389+
.read_volatile_from(addr.0 as usize, src, count)
390+
.map_err(Into::into)
391+
}
392+
393+
fn read_exact_volatile_from<F>(
394+
&self,
395+
addr: MemoryRegionAddress,
396+
src: &mut F,
397+
count: usize,
398+
) -> Result<()>
399+
where
400+
F: ReadVolatile,
401+
{
402+
self.as_volatile_slice()?
403+
.read_exact_volatile_from(addr.0 as usize, src, count)
404+
.map_err(Into::into)
405+
}
406+
407+
fn write_volatile_to<F>(
408+
&self,
409+
addr: MemoryRegionAddress,
410+
dst: &mut F,
411+
count: usize,
412+
) -> Result<usize>
413+
where
414+
F: WriteVolatile,
415+
{
416+
self.as_volatile_slice()?
417+
.write_volatile_to(addr.0 as usize, dst, count)
418+
.map_err(Into::into)
419+
}
420+
421+
fn write_all_volatile_to<F>(
422+
&self,
423+
addr: MemoryRegionAddress,
424+
dst: &mut F,
425+
count: usize,
426+
) -> Result<()>
427+
where
428+
F: WriteVolatile,
429+
{
430+
self.as_volatile_slice()?
431+
.write_all_volatile_to(addr.0 as usize, dst, count)
432+
.map_err(Into::into)
433+
}
434+
435+
fn store<T: AtomicAccess>(
436+
&self,
437+
val: T,
438+
addr: MemoryRegionAddress,
439+
order: Ordering,
440+
) -> Result<()> {
441+
self.as_volatile_slice().and_then(|s| {
442+
s.store(val, addr.raw_value() as usize, order)
443+
.map_err(Into::into)
444+
})
445+
}
446+
447+
fn load<T: AtomicAccess>(&self, addr: MemoryRegionAddress, order: Ordering) -> Result<T> {
448+
self.as_volatile_slice()
449+
.and_then(|s| s.load(addr.raw_value() as usize, order).map_err(Into::into))
450+
}
451+
}

0 commit comments

Comments
 (0)