// ignore-tidy-filelength
-// ignore-tidy-undocumented-unsafe

//! Slice management and manipulation.
//!
@@ -70,6 +69,8 @@ impl<T> [T] {
    #[allow(unused_attributes)]
    #[allow_internal_unstable(const_fn_union)]
    pub const fn len(&self) -> usize {
+        // SAFETY: this is safe because `&[T]` and `FatPtr<T>` have the same layout.
+        // Only `std` can make this guarantee.
        unsafe { crate::ptr::Repr { rust: self }.raw.len }
    }
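
The layout-punning behind this `len` implementation can be sketched outside of libcore; the `FatPtr`/`Repr` definitions below are simplified, illustrative stand-ins (not the real internal types), assuming the usual (pointer, length) slice representation:

// Simplified stand-ins for libcore's internal `FatPtr`/`Repr` (illustrative only).
#[derive(Clone, Copy)]
struct FatPtr {
    data: *const u8,
    len: usize,
}

union Repr<'a> {
    rust: &'a [u8],
    raw: FatPtr,
}

fn slice_len(s: &[u8]) -> usize {
    // SAFETY (sketch): assumes `&[u8]` and `FatPtr` share the same layout,
    // a guarantee only the standard library can make for its own types.
    unsafe { Repr { rust: s }.raw.len }
}

fn main() {
    assert_eq!(slice_len(b"abc"), 3);
}
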
@@ -437,7 +438,8 @@ impl<T> [T] {
    #[unstable(feature = "slice_ptr_range", issue = "65807")]
    #[inline]
    pub fn as_ptr_range(&self) -> Range<*const T> {
-        // The `add` here is safe, because:
+        let start = self.as_ptr();
+        // SAFETY: The `add` here is safe, because:
        //
        // - Both pointers are part of the same object, as pointing directly
        //   past the object also counts.
@@ -454,7 +456,6 @@ impl<T> [T] {
        //   the end of the address space.
        //
        // See the documentation of pointer::add.
-        let start = self.as_ptr();
        let end = unsafe { start.add(self.len()) };
        start..end
    }
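
A small usage sketch of `as_ptr_range` (still unstable behind the `slice_ptr_range` feature referenced above), showing the half-open pointer range it produces:

#![feature(slice_ptr_range)] // required on the nightly toolchains of this era

fn main() {
    let a = [1, 2, 3];
    let second = &a[1] as *const i32;
    // The range spans from the first element to one past the last, so pointers
    // to interior elements compare as contained.
    assert!(a.as_ptr_range().contains(&second));
}
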
@@ -478,8 +479,8 @@ impl<T> [T] {
    #[unstable(feature = "slice_ptr_range", issue = "65807")]
    #[inline]
    pub fn as_mut_ptr_range(&mut self) -> Range<*mut T> {
-        // See as_ptr_range() above for why `add` here is safe.
        let start = self.as_mut_ptr();
+        // SAFETY: See as_ptr_range() above for why `add` here is safe.
        let end = unsafe { start.add(self.len()) };
        start..end
    }
@@ -505,6 +506,8 @@ impl<T> [T] {
    #[stable(feature = "rust1", since = "1.0.0")]
    #[inline]
    pub fn swap(&mut self, a: usize, b: usize) {
+        // SAFETY: `pa` and `pb` have been created from safe mutable references and refer
+        // to elements in the slice and therefore are guaranteed to be valid and aligned.
        unsafe {
            // Can't take two mutable loans from one vector, so instead just cast
            // them to their raw pointers to do the swap
@@ -548,6 +551,10 @@ impl<T> [T] {
            // Use the llvm.bswap intrinsic to reverse u8s in a usize
            let chunk = mem::size_of::<usize>();
            while i + chunk - 1 < ln / 2 {
+                // SAFETY: the condition of the `while` guarantees that
+                // `i` and `ln - i - chunk` are inside the slice.
+                // The resulting pointers `pa` and `pb` are therefore valid,
+                // and can be read from and written to.
                unsafe {
                    let pa: *mut T = self.get_unchecked_mut(i);
                    let pb: *mut T = self.get_unchecked_mut(ln - i - chunk);
@@ -564,6 +571,10 @@ impl<T> [T] {
            // Use rotate-by-16 to reverse u16s in a u32
            let chunk = mem::size_of::<u32>() / 2;
            while i + chunk - 1 < ln / 2 {
+                // SAFETY: the condition of the `while` guarantees that
+                // `i` and `ln - i - chunk` are inside the slice.
+                // The resulting pointers `pa` and `pb` are therefore valid,
+                // and can be read from and written to.
                unsafe {
                    let pa: *mut T = self.get_unchecked_mut(i);
                    let pb: *mut T = self.get_unchecked_mut(ln - i - chunk);
@@ -577,8 +588,12 @@ impl<T> [T] {
        }

        while i < ln / 2 {
-            // Unsafe swap to avoid the bounds check in safe swap.
+            // SAFETY: the condition of the `while` guarantees that `i` and `ln - i - 1`
+            // are inside the slice and refer to valid elements.
+            // The resulting pointers `pa` and `pb` are therefore valid and aligned,
+            // and can be read from and written to.
            unsafe {
+                // Unsafe swap to avoid the bounds check in safe swap.
                let pa: *mut T = self.get_unchecked_mut(i);
                let pb: *mut T = self.get_unchecked_mut(ln - i - 1);
                ptr::swap(pa, pb);
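
The chunk loops above rest on one observation: byte-swapping an integer reverses its in-memory byte order. A quick standalone check of that equivalence, using `u64` for a fixed width:

fn main() {
    let bytes = [1u8, 2, 3, 4, 5, 6, 7, 8];
    let chunk = u64::from_ne_bytes(bytes);

    let mut reversed = bytes;
    reversed.reverse();

    // swap_bytes (llvm.bswap) reverses the byte order of the value, so writing it
    // back out yields the element-wise reversal of the original bytes.
    assert_eq!(chunk.swap_bytes().to_ne_bytes(), reversed);
}
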
@@ -603,6 +618,9 @@ impl<T> [T] {
    #[stable(feature = "rust1", since = "1.0.0")]
    #[inline]
    pub fn iter(&self) -> Iter<'_, T> {
+        // SAFETY: adding `self.len()` to the starting pointer gives a pointer
+        // at the end of `self`, which fulfills the expectations of `ptr.add()`
+        // and `NonNull::new_unchecked()`.
        unsafe {
            let ptr = self.as_ptr();
            assume(!ptr.is_null());
@@ -631,6 +649,9 @@ impl<T> [T] {
    #[stable(feature = "rust1", since = "1.0.0")]
    #[inline]
    pub fn iter_mut(&mut self) -> IterMut<'_, T> {
+        // SAFETY: adding `self.len()` to the starting pointer gives a pointer
+        // at the end of `self`, which fulfills the expectations of `ptr.add()`
+        // and `NonNull::new_unchecked()`.
        unsafe {
            let ptr = self.as_mut_ptr();
            assume(!ptr.is_null());
@@ -1062,6 +1083,8 @@ impl<T> [T] {
        let len = self.len();
        let ptr = self.as_mut_ptr();

+        // SAFETY: `[ptr; mid]` and `[mid; len]` are inside `self`, which fulfills the
+        // requirements of `from_raw_parts_mut`.
        unsafe {
            assert!(mid <= len);
@@ -1548,14 +1571,14 @@ impl<T> [T] {
        while size > 1 {
            let half = size / 2;
            let mid = base + half;
-            // mid is always in [0, size), that means mid is >= 0 and < size.
+            // SAFETY: mid is always in [0, size), that means mid is >= 0 and < size.
            // mid >= 0: by definition
            // mid < size: mid = size / 2 + size / 4 + size / 8 ...
            let cmp = f(unsafe { s.get_unchecked(mid) });
            base = if cmp == Greater { base } else { mid };
            size -= half;
        }
-        // base is always in [0, size) because base <= mid.
+        // SAFETY: base is always in [0, size) because base <= mid.
        let cmp = f(unsafe { s.get_unchecked(base) });
        if cmp == Equal { Ok(base) } else { Err(base + (cmp == Less) as usize) }
    }
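
The bounds argument in these comments can also be seen in a safe, standalone version of the same loop; `upper_bound` below is a hypothetical helper written with checked indexing so that the invariant `base + size <= s.len()` stays visible:

// Returns the index of the first element greater than `x` in a sorted slice.
fn upper_bound(s: &[i32], x: i32) -> usize {
    if s.is_empty() {
        return 0;
    }
    let mut size = s.len();
    let mut base = 0;
    while size > 1 {
        let half = size / 2;
        let mid = base + half;
        // mid < base + size <= s.len(), so this indexing can never panic,
        // mirroring why `get_unchecked(mid)` is sound in the real code.
        if s[mid] <= x {
            base = mid;
        }
        size -= half;
    }
    base + (s[base] <= x) as usize
}

fn main() {
    let v = [1, 3, 3, 5, 7];
    assert_eq!(upper_bound(&v, 3), 3);
    assert_eq!(upper_bound(&v, 0), 0);
    assert_eq!(upper_bound(&v, 9), 5);
}
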
@@ -2013,6 +2036,13 @@ impl<T> [T] {
        let mut next_read: usize = 1;
        let mut next_write: usize = 1;

+        // SAFETY: the `while` condition guarantees `next_read` and `next_write`
+        // are less than `len`, thus are inside `self`. `prev_ptr_write` points to
+        // one element before `ptr_write`, but `next_write` starts at 1, so
+        // `prev_ptr_write` is never less than 0 and is inside the slice.
+        // This fulfills the requirements for dereferencing `ptr_read`, `prev_ptr_write`
+        // and `ptr_write`, and for using `ptr.add(next_read)`, `ptr.add(next_write - 1)`
+        // and `prev_ptr_write.offset(1)`.
        unsafe {
            // Avoid bounds checks by using raw pointers.
            while next_read < len {
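
The read/write cursor scheme described in the SAFETY comment above is easier to follow with safe indexing. The sketch below drops duplicates `Vec::dedup`-style instead of partitioning them to the end like the real method, but the cursor invariants (`next_write <= next_read < len`) are the same:

fn dedup_in_place(v: &mut Vec<i32>) {
    let len = v.len();
    if len <= 1 {
        return;
    }
    let mut next_write = 1;
    for next_read in 1..len {
        let value = v[next_read];
        // `next_write - 1` plays the role of `prev_ptr_write`: it never
        // underflows because `next_write` starts at 1 and only grows.
        if value != v[next_write - 1] {
            v[next_write] = value;
            next_write += 1;
        }
    }
    v.truncate(next_write);
}

fn main() {
    let mut v = vec![1, 1, 2, 2, 2, 3, 1];
    dedup_in_place(&mut v);
    assert_eq!(v, [1, 2, 3, 1]);
}
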
@@ -2097,6 +2127,8 @@ impl<T> [T] {
        assert!(mid <= self.len());
        let k = self.len() - mid;

+        // SAFETY: `[mid - mid; mid + k]` corresponds to the entire
+        // `self` slice, thus is valid for reads and writes.
        unsafe {
            let p = self.as_mut_ptr();
            rotate::ptr_rotate(mid, p.add(mid), k);
@@ -2138,6 +2170,8 @@ impl<T> [T] {
        assert!(k <= self.len());
        let mid = self.len() - k;

+        // SAFETY: `[mid - mid; mid + k]` corresponds to the entire
+        // `self` slice, thus is valid for reads and writes.
        unsafe {
            let p = self.as_mut_ptr();
            rotate::ptr_rotate(mid, p.add(mid), k);
@@ -2300,6 +2334,9 @@ impl<T> [T] {
        T: Copy,
    {
        assert_eq!(self.len(), src.len(), "destination and source slices have different lengths");
+        // SAFETY: `self` is valid for `self.len()` elements by definition, and `src` was
+        // checked to have the same length. The two slices cannot overlap because
+        // Rust's mutable references are exclusive.
        unsafe {
            ptr::copy_nonoverlapping(src.as_ptr(), self.as_mut_ptr(), self.len());
        }
@@ -2353,6 +2390,7 @@ impl<T> [T] {
        assert!(src_end <= self.len(), "src is out of bounds");
        let count = src_end - src_start;
        assert!(dest <= self.len() - count, "dest is out of bounds");
+        // SAFETY: the conditions for `ptr::copy` have been checked above.
        unsafe {
            ptr::copy(self.as_ptr().add(src_start), self.as_mut_ptr().add(dest), count);
        }
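
For reference, a usage example of `copy_within` showing the checked `src` and `dest` ranges in action (this mirrors the method's documentation example):

fn main() {
    let mut bytes = *b"Hello, World!";
    // src range 1..5 and dest 8 both lie inside the slice, so the asserts pass
    // and `ptr::copy` handles the (potentially overlapping) move.
    bytes.copy_within(1..5, 8);
    assert_eq!(&bytes, b"Hello, Wello!");
}
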
@@ -2408,6 +2446,9 @@ impl<T> [T] {
    #[stable(feature = "swap_with_slice", since = "1.27.0")]
    pub fn swap_with_slice(&mut self, other: &mut [T]) {
        assert!(self.len() == other.len(), "destination and source slices have different lengths");
+        // SAFETY: `self` is valid for `self.len()` elements by definition, and `other` was
+        // checked to have the same length. The two slices cannot overlap because
+        // Rust's mutable references are exclusive.
        unsafe {
            ptr::swap_nonoverlapping(self.as_mut_ptr(), other.as_mut_ptr(), self.len());
        }
@@ -2439,6 +2480,8 @@ impl<T> [T] {
            // iterative stein’s algorithm
            // We should still make this `const fn` (and revert to recursive algorithm if we do)
            // because relying on llvm to consteval all this is… well, it makes me uncomfortable.
+
+            // SAFETY: `a` and `b` are checked to be non-zero values.
            let (ctz_a, mut ctz_b) = unsafe {
                if a == 0 {
                    return b;
@@ -2458,6 +2501,7 @@ impl<T> [T] {
                    mem::swap(&mut a, &mut b);
                }
                b = b - a;
+                // SAFETY: `b` is checked to be non-zero.
                unsafe {
                    if b == 0 {
                        break;
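
The two SAFETY comments above guard an unchecked count-trailing-zeros intrinsic. A safe standalone sketch of the same iterative Stein's algorithm, using `trailing_zeros` instead of the intrinsic:

fn gcd(mut a: usize, mut b: usize) -> usize {
    if a == 0 {
        return b;
    }
    if b == 0 {
        return a;
    }
    // Factor out the common power of two, then repeatedly strip trailing
    // zeros and subtract the smaller value from the larger one.
    let shift = (a | b).trailing_zeros();
    a >>= a.trailing_zeros();
    loop {
        b >>= b.trailing_zeros();
        if a > b {
            std::mem::swap(&mut a, &mut b);
        }
        b -= a;
        if b == 0 {
            return a << shift;
        }
    }
}

fn main() {
    assert_eq!(gcd(48, 18), 6);
    assert_eq!(gcd(0, 7), 7);
}
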
@@ -2848,11 +2892,13 @@ impl<T> SliceIndex<[T]> for usize {
    #[inline]
    fn get(self, slice: &[T]) -> Option<&T> {
+        // SAFETY: `self` is checked to be in bounds.
        if self < slice.len() { unsafe { Some(self.get_unchecked(slice)) } } else { None }
    }

    #[inline]
    fn get_mut(self, slice: &mut [T]) -> Option<&mut T> {
+        // SAFETY: `self` is checked to be in bounds.
        if self < slice.len() { unsafe { Some(self.get_unchecked_mut(slice)) } } else { None }
    }
@@ -2888,6 +2934,7 @@ impl<T> SliceIndex<[T]> for ops::Range<usize> {
        if self.start > self.end || self.end > slice.len() {
            None
        } else {
+            // SAFETY: `self` is checked to be valid and in bounds above.
            unsafe { Some(self.get_unchecked(slice)) }
        }
    }
@@ -2897,6 +2944,7 @@ impl<T> SliceIndex<[T]> for ops::Range<usize> {
        if self.start > self.end || self.end > slice.len() {
            None
        } else {
+            // SAFETY: `self` is checked to be valid and in bounds above.
            unsafe { Some(self.get_unchecked_mut(slice)) }
        }
    }
@@ -2918,6 +2966,7 @@ impl<T> SliceIndex<[T]> for ops::Range<usize> {
        } else if self.end > slice.len() {
            slice_index_len_fail(self.end, slice.len());
        }
+        // SAFETY: `self` is checked to be valid and in bounds above.
        unsafe { self.get_unchecked(slice) }
    }
@@ -2928,6 +2977,7 @@ impl<T> SliceIndex<[T]> for ops::Range<usize> {
        } else if self.end > slice.len() {
            slice_index_len_fail(self.end, slice.len());
        }
+        // SAFETY: `self` is checked to be valid and in bounds above.
        unsafe { self.get_unchecked_mut(slice) }
    }
}
@@ -3239,6 +3289,8 @@ macro_rules! iterator {
            // Helper function for creating a slice from the iterator.
            #[inline(always)]
            fn make_slice(&self) -> &'a [T] {
+                // SAFETY: the iterator was created from a slice with pointer `self.ptr` and length `len!(self)`.
+                // This guarantees that all the prerequisites for `from_raw_parts` are fulfilled.
                unsafe { from_raw_parts(self.ptr.as_ptr(), len!(self)) }
            }
@@ -3292,6 +3344,10 @@ macro_rules! iterator {
            #[inline]
            fn next(&mut self) -> Option<$elem> {
                // could be implemented with slices, but this avoids bounds checks
+
+                // SAFETY: `assume` calls are safe since a slice's start pointer must be non-null,
+                // and slices over non-ZSTs must also have a non-null end pointer.
+                // The call to `next_unchecked!` is safe since we check if the iterator is empty first.
                unsafe {
                    assume(!self.ptr.as_ptr().is_null());
                    if mem::size_of::<T>() != 0 {
@@ -3325,14 +3381,14 @@ macro_rules! iterator {
                        // could be (due to wrapping).
                        self.end = self.ptr.as_ptr();
                    } else {
+                        // SAFETY: end can't be 0 if T isn't ZST because ptr isn't 0 and end >= ptr
                        unsafe {
-                            // End can't be 0 if T isn't ZST because ptr isn't 0 and end >= ptr
                            self.ptr = NonNull::new_unchecked(self.end as *mut T);
                        }
                    }
                    return None;
                }
-                // We are in bounds. `post_inc_start` does the right thing even for ZSTs.
+                // SAFETY: we are in bounds. `post_inc_start` does the right thing even for ZSTs.
                unsafe {
                    self.post_inc_start(n as isize);
                    Some(next_unchecked!(self))
@@ -3439,6 +3495,8 @@ macro_rules! iterator {
                let mut i = 0;
                while let Some(x) = self.next() {
                    if predicate(x) {
+                        // SAFETY: we are guaranteed to be in bounds by the loop invariant:
+                        // when `i >= n`, `self.next()` returns `None` and the loop breaks.
                        unsafe { assume(i < n) };
                        return Some(i);
                    }
@@ -3460,6 +3518,8 @@ macro_rules! iterator {
                while let Some(x) = self.next_back() {
                    i -= 1;
                    if predicate(x) {
+                        // SAFETY: `i` must be lower than `n` since it starts at `n`
+                        // and is only decreasing.
                        unsafe { assume(i < n) };
                        return Some(i);
                    }
@@ -3475,6 +3535,10 @@ macro_rules! iterator {
            #[inline]
            fn next_back(&mut self) -> Option<$elem> {
                // could be implemented with slices, but this avoids bounds checks
+
+                // SAFETY: `assume` calls are safe since a slice's start pointer must be non-null,
+                // and slices over non-ZSTs must also have a non-null end pointer.
+                // The call to `next_back_unchecked!` is safe since we check if the iterator is empty first.
                unsafe {
                    assume(!self.ptr.as_ptr().is_null());
                    if mem::size_of::<T>() != 0 {
@@ -3495,7 +3559,7 @@ macro_rules! iterator {
                    self.end = self.ptr.as_ptr();
                    return None;
                }
-                // We are in bounds. `pre_dec_end` does the right thing even for ZSTs.
+                // SAFETY: we are in bounds. `pre_dec_end` does the right thing even for ZSTs.
                unsafe {
                    self.pre_dec_end(n as isize);
                    Some(next_back_unchecked!(self))
@@ -3690,6 +3754,8 @@ impl<'a, T> IterMut<'a, T> {
    /// ```
    #[stable(feature = "iter_to_slice", since = "1.4.0")]
    pub fn into_slice(self) -> &'a mut [T] {
+        // SAFETY: the iterator was created from a mutable slice with pointer `self.ptr` and length `len!(self)`.
+        // This guarantees that all the prerequisites for `from_raw_parts_mut` are fulfilled.
        unsafe { from_raw_parts_mut(self.ptr.as_ptr(), len!(self)) }
    }
@@ -5855,12 +5921,20 @@ pub unsafe fn from_raw_parts_mut<'a, T>(data: *mut T, len: usize) -> &'a mut [T]
/// Converts a reference to T into a slice of length 1 (without copying).
#[stable(feature = "from_ref", since = "1.28.0")]
pub fn from_ref<T>(s: &T) -> &[T] {
+    // SAFETY: a reference is guaranteed to be valid for reads. The returned
+    // reference cannot be mutated as it is an immutable reference.
+    // `mem::size_of::<T>()` cannot be larger than `isize::MAX`.
+    // Thus the call to `from_raw_parts` is safe.
    unsafe { from_raw_parts(s, 1) }
}

/// Converts a reference to T into a slice of length 1 (without copying).
#[stable(feature = "from_ref", since = "1.28.0")]
pub fn from_mut<T>(s: &mut T) -> &mut [T] {
+    // SAFETY: a mutable reference is guaranteed to be valid for writes.
+    // The reference cannot be accessed by another pointer as it is a mutable reference.
+    // `mem::size_of::<T>()` cannot be larger than `isize::MAX`.
+    // Thus the call to `from_raw_parts_mut` is safe.
    unsafe { from_raw_parts_mut(s, 1) }
}
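
A short usage example of the two helpers above, viewing a single value as a one-element slice:

fn main() {
    let x = 5;
    let s = std::slice::from_ref(&x);
    assert_eq!(s.len(), 1);
    assert_eq!(s[0], 5);

    let mut y = 1;
    // The one-element mutable slice aliases `y`, so writes through it are visible.
    std::slice::from_mut(&mut y)[0] += 1;
    assert_eq!(y, 2);
}
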
@@ -5993,6 +6067,9 @@ where
        if self.as_ptr().guaranteed_eq(other.as_ptr()) {
            return true;
        }
+
+        // SAFETY: `self` and `other` are references and are thus guaranteed to be valid.
+        // The two slices have been checked to have the same size above.
        unsafe {
            let size = mem::size_of_val(self);
            memcmp(self.as_ptr() as *const u8, other.as_ptr() as *const u8, size) == 0
@@ -6095,6 +6172,9 @@ impl SliceOrd for u8 {
    #[inline]
    fn compare(left: &[Self], right: &[Self]) -> Ordering {
        let order =
+            // SAFETY: `left` and `right` are references and are thus guaranteed to be valid.
+            // We use the minimum of both lengths which guarantees that both regions are
+            // valid for reads in that interval.
            unsafe { memcmp(left.as_ptr(), right.as_ptr(), cmp::min(left.len(), right.len())) };
        if order == 0 {
            left.len().cmp(&right.len())
@@ -6164,6 +6244,10 @@ impl SliceContains for u8 {
impl SliceContains for i8 {
    fn slice_contains(&self, x: &[Self]) -> bool {
        let byte = *self as u8;
+        // SAFETY: `i8` and `u8` have the same memory layout, thus casting `x.as_ptr()`
+        // as `*const u8` is safe. The `x.as_ptr()` comes from a reference and is thus guaranteed
+        // to be valid for reads for the length of the slice `x.len()`, which cannot be larger
+        // than `isize::MAX`. The returned slice is never mutated.
        let bytes: &[u8] = unsafe { from_raw_parts(x.as_ptr() as *const u8, x.len()) };
        memchr::memchr(byte, bytes).is_some()
    }