@@ -114,7 +114,7 @@ impl<'a, M: GuestMemory> DescriptorChain<'a, M> {
         let desc_head = desc_table.unchecked_add(u64::from(index) * 16);
 
         // These reads can't fail unless Guest memory is hopelessly broken.
-        let desc = match mem.read_obj::<Descriptor>(desc_head) {
+        let desc = match mem.load_obj::<Descriptor>(desc_head) {
             Ok(ret) => ret,
             Err(err) => {
                 error!(
@@ -511,7 +511,7 @@ impl Queue {
         // guest after device activation, so we can be certain that no change has
         // occurred since the last `self.is_valid()` check.
         let addr = self.avail_ring.unchecked_add(2);
-        Wrapping(mem.read_obj::<u16>(addr).unwrap())
+        Wrapping(mem.load_obj::<u16>(addr).unwrap())
     }
 
     /// Get the value of the used event field of the avail ring.
@@ -524,7 +524,7 @@ impl Queue {
             .avail_ring
             .unchecked_add(u64::from(4 + 2 * self.actual_size()));
 
-        Wrapping(mem.read_obj::<u16>(used_event_addr).unwrap())
+        Wrapping(mem.load_obj::<u16>(used_event_addr).unwrap())
     }
 
     /// Helper method that writes to the `avail_event` field of the used ring.
@@ -643,6 +643,28 @@ impl Queue {
     }
 }
 
+trait MemBytesExt: GuestMemory {
+    /// Load an object `T` from a guest physical address (GPA).
+    ///
+    /// Usually used for very small items.
+    #[inline(always)]
+    fn load_obj<T: ByteValued>(
+        &self,
+        addr: GuestAddress,
+    ) -> Result<T, <Self as Bytes<GuestAddress>>::E> {
+        if let Ok(s) = self.get_slice(addr, std::mem::size_of::<T>()) {
+            let ptr = s.ptr_guard().as_ptr().cast::<T>();
+            if ptr.is_aligned() {
+                // SAFETY: We just checked that the slice has exactly the size of `T` and that
+                // the pointer is aligned; `T: ByteValued` means any bit pattern is a valid `T`.
+                return Ok(unsafe { ptr.read_volatile() });
+            }
+        }
+        self.read_obj::<T>(addr)
+    }
+}
+impl<T: GuestMemory> MemBytesExt for T {}
+
 #[cfg(kani)]
 #[allow(dead_code)]
 mod verification {
@@ -1161,10 +1183,25 @@ mod tests {
                 .used_ring
                 .unchecked_add(u64::from(4 + 8 * self.actual_size()));
 
-            mem.read_obj::<u16>(avail_event_addr).unwrap()
+            mem.load_obj::<u16>(avail_event_addr).unwrap()
         }
     }
 
+    #[test]
+    fn test_load_obj() {
+        let m = &multi_region_mem(&[(GuestAddress(0), 0x10000), (GuestAddress(0x20000), 0x2000)]);
+        // Normal write and read.
+        m.write_obj::<u32>(0xdeadbeef, GuestAddress(0)).unwrap();
+        assert_eq!(m.load_obj::<u32>(GuestAddress(0)).unwrap(), 0xdeadbeef);
+        // Unaligned read.
+        m.write_obj::<u32>(0xcafebabe, GuestAddress(1)).unwrap();
+        assert_eq!(m.load_obj::<u32>(GuestAddress(1)).unwrap(), 0xcafebabe);
+        // Read across regions.
+        m.write_obj::<u32>(0xdeadbeef, GuestAddress(0x1fff))
+            .unwrap();
+        assert_eq!(m.load_obj::<u32>(GuestAddress(0x1fff)).unwrap(), 0xdeadbeef);
+    }
+
     #[test]
     fn test_checked_new_descriptor_chain() {
         let m = &multi_region_mem(&[(GuestAddress(0), 0x10000), (GuestAddress(0x20000), 0x2000)]);
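
The fast path in `load_obj` comes down to: take a `VolatileSlice` over guest memory, and if the resulting host pointer is aligned for `T`, do a single volatile read instead of the byte-wise copy that `read_obj` performs (the read stays volatile because the guest may modify its memory concurrently). A minimal standalone sketch of the same idea, assuming a plain byte buffer in place of `GuestMemory` and `Copy` in place of `ByteValued` (`load_from_buf` is illustrative, not from this commit):

    /// Fast path: one volatile read when the pointer is aligned for `T`;
    /// otherwise fall back to an unaligned (byte-copying) read.
    fn load_from_buf<T: Copy>(buf: &[u8], offset: usize) -> Option<T> {
        let size = std::mem::size_of::<T>();
        let bytes = buf.get(offset..offset.checked_add(size)?)?;
        let ptr = bytes.as_ptr().cast::<T>();
        if (ptr as usize) % std::mem::align_of::<T>() == 0 {
            // SAFETY: the slice is exactly `size_of::<T>()` bytes and the pointer
            // is aligned; `T: Copy` stands in for the "any bit pattern is a valid
            // `T`" guarantee that `ByteValued` provides in the real code.
            return Some(unsafe { ptr.read_volatile() });
        }
        // SAFETY: same size argument as above; `read_unaligned` copies the bytes
        // and so has no alignment requirement, mirroring the `read_obj` fallback.
        Some(unsafe { ptr.read_unaligned() })
    }

    // Both paths return the same value; whether the volatile fast path fires
    // depends on the buffer's runtime address. On a little-endian host:
    //     let buf = [0xefu8, 0xbe, 0xad, 0xde, 0x00];
    //     assert_eq!(load_from_buf::<u32>(&buf, 0), Some(0xdeadbeef));
    //     assert_eq!(load_from_buf::<u32>(&buf, 1), Some(0x00deadbe));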