@@ -114,7 +114,7 @@ impl<'a, M: GuestMemory> DescriptorChain<'a, M> {
         let desc_head = desc_table.unchecked_add(u64::from(index) * 16);

         // These reads can't fail unless Guest memory is hopelessly broken.
-        let desc = match mem.read_obj::<Descriptor>(desc_head) {
+        let desc = match mem.load_obj::<Descriptor>(desc_head) {
             Ok(ret) => ret,
             Err(err) => {
                 error!(
@@ -427,7 +427,7 @@ impl Queue {
         // and virtq rings, so it's safe to unwrap guest memory reads and to use unchecked
         // offsets.
         let desc_index: u16 = mem
-            .read_obj(self.avail_ring.unchecked_add(u64::from(index_offset)))
+            .load_obj(self.avail_ring.unchecked_add(u64::from(index_offset)))
             .unwrap();

         DescriptorChain::checked_new(mem, self.desc_table, self.actual_size(), desc_index).map(
@@ -511,7 +511,7 @@ impl Queue {
         // guest after device activation, so we can be certain that no change has
         // occurred since the last `self.is_valid()` check.
         let addr = self.avail_ring.unchecked_add(2);
-        Wrapping(mem.read_obj::<u16>(addr).unwrap())
+        Wrapping(mem.load_obj::<u16>(addr).unwrap())
     }

     /// Get the value of the used event field of the avail ring.
@@ -524,7 +524,7 @@ impl Queue {
             .avail_ring
             .unchecked_add(u64::from(4 + 2 * self.actual_size()));

-        Wrapping(mem.read_obj::<u16>(used_event_addr).unwrap())
+        Wrapping(mem.load_obj::<u16>(used_event_addr).unwrap())
     }

     /// Helper method that writes to the `avail_event` field of the used ring.
@@ -643,6 +643,34 @@ impl Queue {
     }
 }

+trait MemBytesExt: GuestMemory {
+    /// Load an object `T` from a guest physical address (GPA).
+    ///
+    /// Usually used for very small items.
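+    /// Falls back to the generic `read_obj` when the fast path cannot be
+    /// taken (unaligned address, or no single contiguous mapping).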
+    #[inline(always)]
+    fn load_obj<T: ByteValued>(
+        &self,
+        addr: GuestAddress,
+    ) -> Result<T, <Self as Bytes<GuestAddress>>::E> {
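+        // Fast path: the whole object lives in one contiguous guest memory
+        // mapping, so it can be fetched with a single volatile load.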
+        if let Ok(s) = self.get_slice(addr, std::mem::size_of::<T>()) {
+            let ptr = s.ptr_guard().as_ptr().cast::<T>();
+            if ptr.is_aligned() {
+                // SAFETY: the slice is exactly `size_of::<T>()` bytes long, the
+                // pointer is aligned for `T`, and `T: ByteValued` guarantees that
+                // any bit pattern is a valid `T`.
+                return Ok(unsafe { ptr.read_volatile() });
+            }
+        }
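+        // Slow path: fall back to the byte-wise `read_obj`.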
+        self.read_obj::<T>(addr)
+    }
+}
+impl<T: GuestMemory> MemBytesExt for T {}
+
 #[cfg(kani)]
 #[allow(dead_code)]
 mod verification {
@@ -1161,10 +1183,25 @@ mod tests {
             .used_ring
             .unchecked_add(u64::from(4 + 8 * self.actual_size()));

-        mem.read_obj::<u16>(avail_event_addr).unwrap()
+        mem.load_obj::<u16>(avail_event_addr).unwrap()
     }
 }

+    #[test]
+    fn test_load_obj() {
+        let m = &multi_region_mem(&[(GuestAddress(0), 0x10000), (GuestAddress(0x20000), 0x2000)]);
+        // normal write and read
+        m.write_obj::<u32>(0xdeadbeef, GuestAddress(0)).unwrap();
+        assert_eq!(m.load_obj::<u32>(GuestAddress(0)).unwrap(), 0xdeadbeef);
+        // unaligned read
+        m.write_obj::<u32>(0xcafebabe, GuestAddress(1)).unwrap();
+        assert_eq!(m.load_obj::<u32>(GuestAddress(1)).unwrap(), 0xcafebabe);
+        // unaligned read crossing a page boundary (still within one region)
+        m.write_obj::<u32>(0xdeadbeef, GuestAddress(0x1fff))
+            .unwrap();
+        assert_eq!(m.load_obj::<u32>(GuestAddress(0x1fff)).unwrap(), 0xdeadbeef);
+    }
+
     #[test]
     fn test_checked_new_descriptor_chain() {
         let m = &multi_region_mem(&[(GuestAddress(0), 0x10000), (GuestAddress(0x20000), 0x2000)]);