use super::*;
use crate::table::system_table_boot;
+use core::fmt::{Debug, Display, Formatter};
use core::ops::{Index, IndexMut};
use core::ptr::NonNull;
use core::{mem, ptr};
use uefi_raw::PhysicalAddress;

+/// Errors that may happen when constructing a [`MemoryMapRef`] or
+/// [`MemoryMapRefMut`].
+#[derive(Copy, Clone, Debug)]
+pub enum MemoryMapError {
+    /// The buffer is not 8-byte aligned.
+    Misaligned,
+    /// The memory map size is invalid.
+    InvalidSize,
+}
+
+impl Display for MemoryMapError {
+    fn fmt(&self, f: &mut Formatter<'_>) -> core::fmt::Result {
+        Debug::fmt(self, f)
+    }
+}
+
+#[cfg(feature = "unstable")]
+impl core::error::Error for MemoryMapError {}
+
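// Editorial sketch, not part of this change set: `Display` delegates to `Debug`,
// so both formattings print the plain variant name. A minimal check, assuming
// the `alloc` crate is linked (as it already is for the unit tests added below):
#[cfg(test)]
#[test]
fn memory_map_error_display_sketch() {
    // Both render as "Misaligned".
    assert_eq!(alloc::format!("{}", MemoryMapError::Misaligned), "Misaligned");
    assert_eq!(
        alloc::format!("{:?}", MemoryMapError::Misaligned),
        "Misaligned"
    );
}
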
/// Implementation of [`MemoryMap`] for the given buffer.
#[derive(Debug)]
-#[allow(dead_code)] // TODO: github.com/rust-osdev/uefi-rs/issues/1247
pub struct MemoryMapRef<'a> {
    buf: &'a [u8],
-    key: MemoryMapKey,
    meta: MemoryMapMeta,
    len: usize,
}

+impl<'a> MemoryMapRef<'a> {
+    /// Constructs a new [`MemoryMapRef`].
+    ///
+    /// The underlying memory might contain an invalid/malformed memory map
+    /// which can't be checked during construction of this type. The entry
+    /// iterator might yield unexpected results.
+    pub fn new(buffer: &'a [u8], meta: MemoryMapMeta) -> Result<Self, MemoryMapError> {
+        if buffer.as_ptr().align_offset(8) != 0 {
+            return Err(MemoryMapError::Misaligned);
+        }
+        if buffer.len() < meta.map_size {
+            return Err(MemoryMapError::InvalidSize);
+        }
+        Ok(Self {
+            buf: buffer,
+            meta,
+            len: meta.entry_count(),
+        })
+    }
+}
+
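// Editorial sketch, not part of this change set: how a caller might build a
// read-only view over a firmware-provided buffer and react to the new
// `MemoryMapError` variants. `buffer` and `meta` are assumed to come from an
// earlier "get memory map" call; the function name is illustrative only.
#[allow(dead_code)]
fn dump_memory_map(buffer: &[u8], meta: MemoryMapMeta) {
    match MemoryMapRef::new(buffer, meta) {
        Ok(map) => {
            // Iterate the parsed descriptors via the `MemoryMap` trait.
            for desc in map.entries() {
                log::debug!("{:?} @ {:#x}", desc.ty, desc.phys_start);
            }
        }
        // The buffer must be 8-byte aligned ...
        Err(MemoryMapError::Misaligned) => log::warn!("memory map buffer is not 8-byte aligned"),
        // ... and at least `meta.map_size` bytes long.
        Err(MemoryMapError::InvalidSize) => log::warn!("buffer is smaller than meta.map_size"),
    }
}
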
impl<'a> MemoryMap for MemoryMapRef<'a> {
    fn meta(&self) -> MemoryMapMeta {
        self.meta
    }

    fn key(&self) -> MemoryMapKey {
-        self.key
+        self.meta.map_key
    }

    fn len(&self) -> usize {
@@ -55,18 +94,38 @@ impl Index<usize> for MemoryMapRef<'_> {
#[derive(Debug)]
pub struct MemoryMapRefMut<'a> {
    buf: &'a mut [u8],
-    key: MemoryMapKey,
    meta: MemoryMapMeta,
    len: usize,
}

+impl<'a> MemoryMapRefMut<'a> {
+    /// Constructs a new [`MemoryMapRefMut`].
+    ///
+    /// The underlying memory might contain an invalid/malformed memory map
+    /// which can't be checked during construction of this type. The entry
+    /// iterator might yield unexpected results.
+    pub fn new(buffer: &'a mut [u8], meta: MemoryMapMeta) -> Result<Self, MemoryMapError> {
+        if buffer.as_ptr().align_offset(8) != 0 {
+            return Err(MemoryMapError::Misaligned);
+        }
+        if buffer.len() < meta.map_size {
+            return Err(MemoryMapError::InvalidSize);
+        }
+        Ok(Self {
+            buf: buffer,
+            meta,
+            len: meta.entry_count(),
+        })
+    }
+}
+
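// Editorial sketch, not part of this change set: a mutable view can sort the
// descriptors in place without taking ownership of the buffer. `buffer` and
// `meta` are assumed to describe an already initialized memory map; the helper
// name is illustrative only.
#[allow(dead_code)]
fn sort_memory_map_in_place(
    buffer: &mut [u8],
    meta: MemoryMapMeta,
) -> Result<(), MemoryMapError> {
    let mut map = MemoryMapRefMut::new(buffer, meta)?;
    if !map.is_sorted() {
        // Reorders the underlying descriptors directly inside `buffer`.
        map.sort();
    }
    Ok(())
}
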
impl<'a> MemoryMap for MemoryMapRefMut<'a> {
    fn meta(&self) -> MemoryMapMeta {
        self.meta
    }

    fn key(&self) -> MemoryMapKey {
-        self.key
+        self.meta.map_key
    }

    fn len(&self) -> usize {
@@ -241,10 +300,12 @@ impl MemoryMapBackingMemory {
        Self(slice)
    }

+    /// INTERNAL, for unit tests.
+    ///
    /// Creates an instance from the provided memory, which is not necessarily
    /// on the UEFI heap.
    #[cfg(test)]
-    fn from_slice(buffer: &mut [u8]) -> Self {
+    pub(crate) fn from_slice(buffer: &mut [u8]) -> Self {
        let len = buffer.len();
        unsafe { Self::from_raw(buffer.as_mut_ptr(), len) }
    }
@@ -287,6 +348,10 @@ impl Drop for MemoryMapBackingMemory {
                log::error!("Failed to deallocate memory map: {e:?}");
            }
        } else {
+            #[cfg(test)]
+            log::debug!("Boot services are not available in unit tests.");
+
+            #[cfg(not(test))]
            log::debug!("Boot services are exited. Memory map won't be freed using the UEFI boot services allocator.");
        }
    }
@@ -297,37 +362,18 @@ impl Drop for MemoryMapBackingMemory {
pub struct MemoryMapOwned {
    /// Backing memory, properly initialized at this point.
    pub(crate) buf: MemoryMapBackingMemory,
-    pub(crate) key: MemoryMapKey,
    pub(crate) meta: MemoryMapMeta,
    pub(crate) len: usize,
}

impl MemoryMapOwned {
-    /// Creates a [`MemoryMapOwned`] from the give initialized memory map behind
-    /// the buffer and the reported `desc_size` from UEFI.
+    /// Creates a [`MemoryMapOwned`] from the given **initialized** memory map
+    /// (stored inside the provided buffer) and the corresponding
+    /// [`MemoryMapMeta`].
    pub(crate) fn from_initialized_mem(buf: MemoryMapBackingMemory, meta: MemoryMapMeta) -> Self {
        assert!(meta.desc_size >= mem::size_of::<MemoryDescriptor>());
        let len = meta.entry_count();
-        MemoryMapOwned {
-            key: MemoryMapKey(0),
-            buf,
-            meta,
-            len,
-        }
-    }
-
-    #[cfg(test)]
-    pub(super) fn from_raw(buf: &mut [u8], desc_size: usize) -> Self {
-        let mem = MemoryMapBackingMemory::from_slice(buf);
-        Self::from_initialized_mem(
-            mem,
-            MemoryMapMeta {
-                map_size: buf.len(),
-                desc_size,
-                map_key: MemoryMapKey(0),
-                desc_version: MemoryDescriptor::VERSION,
-            },
-        )
+        MemoryMapOwned { buf, meta, len }
    }
}

@@ -337,7 +383,7 @@ impl MemoryMap for MemoryMapOwned {
    }

    fn key(&self) -> MemoryMapKey {
-        self.key
+        self.meta.map_key
    }

    fn len(&self) -> usize {
@@ -360,7 +406,6 @@ impl MemoryMapMut for MemoryMapOwned {
    fn sort(&mut self) {
        let mut reference = MemoryMapRefMut {
            buf: self.buf.as_mut_slice(),
-            key: self.key,
            meta: self.meta,
            len: self.len,
        };
@@ -385,3 +430,103 @@ impl IndexMut<usize> for MemoryMapOwned {
        self.get_mut(index).unwrap()
    }
}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use alloc::vec::Vec;
+    use core::mem::size_of;
+
+    const BASE_MMAP_UNSORTED: [MemoryDescriptor; 3] = [
+        MemoryDescriptor {
+            ty: MemoryType::CONVENTIONAL,
+            phys_start: 0x3000,
+            virt_start: 0x3000,
+            page_count: 1,
+            att: MemoryAttribute::WRITE_BACK,
+        },
+        MemoryDescriptor {
+            ty: MemoryType::CONVENTIONAL,
+            phys_start: 0x2000,
+            virt_start: 0x2000,
+            page_count: 1,
+            att: MemoryAttribute::WRITE_BACK,
+        },
+        MemoryDescriptor {
+            ty: MemoryType::CONVENTIONAL,
+            phys_start: 0x1000,
+            virt_start: 0x1000,
+            page_count: 1,
+            att: MemoryAttribute::WRITE_BACK,
+        },
+    ];
+
+    /// Returns a copy of [`BASE_MMAP_UNSORTED`] owned on the stack.
+    fn new_mmap_memory() -> [MemoryDescriptor; 3] {
+        BASE_MMAP_UNSORTED
+    }
+
+    fn mmap_raw<'a>(memory: &mut [MemoryDescriptor]) -> (&'a mut [u8], MemoryMapMeta) {
+        let desc_size = size_of::<MemoryDescriptor>();
+        let len = memory.len() * desc_size;
+        let ptr = memory.as_mut_ptr().cast::<u8>();
+        let slice = unsafe { core::slice::from_raw_parts_mut(ptr, len) };
+        let meta = MemoryMapMeta {
+            map_size: len,
+            desc_size,
+            map_key: Default::default(),
+            desc_version: MemoryDescriptor::VERSION,
+        };
+        (slice, meta)
+    }
+
+    /// Basic sanity checks for the type [`MemoryMapRef`].
+    #[test]
+    fn memory_map_ref() {
+        let mut memory = new_mmap_memory();
+        let (mmap, meta) = mmap_raw(&mut memory);
+        let mmap = MemoryMapRef::new(mmap, meta).unwrap();
+
+        assert_eq!(mmap.entries().count(), 3);
+        assert_eq!(
+            mmap.entries().copied().collect::<Vec<_>>().as_slice(),
+            &BASE_MMAP_UNSORTED
+        );
+        assert!(!mmap.is_sorted());
+    }
+
+    /// Basic sanity checks for the type [`MemoryMapRefMut`].
+    #[test]
+    fn memory_map_ref_mut() {
+        let mut memory = new_mmap_memory();
+        let (mmap, meta) = mmap_raw(&mut memory);
+        let mut mmap = MemoryMapRefMut::new(mmap, meta).unwrap();
+
+        assert_eq!(mmap.entries().count(), 3);
+        assert_eq!(
+            mmap.entries().copied().collect::<Vec<_>>().as_slice(),
+            &BASE_MMAP_UNSORTED
+        );
+        assert!(!mmap.is_sorted());
+        mmap.sort();
+        assert!(mmap.is_sorted());
+    }
+
+    /// Basic sanity checks for the type [`MemoryMapOwned`].
+    #[test]
+    fn memory_map_owned() {
+        let mut memory = new_mmap_memory();
+        let (mmap, meta) = mmap_raw(&mut memory);
+        let mmap = MemoryMapBackingMemory::from_slice(mmap);
+        let mut mmap = MemoryMapOwned::from_initialized_mem(mmap, meta);
+
+        assert_eq!(mmap.entries().count(), 3);
+        assert_eq!(
+            mmap.entries().copied().collect::<Vec<_>>().as_slice(),
+            &BASE_MMAP_UNSORTED
+        );
+        assert!(!mmap.is_sorted());
+        mmap.sort();
+        assert!(mmap.is_sorted());
+    }
+}