use crate::mem::{self, ManuallyDrop, MaybeUninit};
use crate::slice::sort::shared::FreezeMarker;
use crate::{intrinsics, ptr, slice};
+ use safety::{ensures, requires};
+
+ #[cfg(kani)]
+ use crate::kani;

// It's important to differentiate between SMALL_SORT_THRESHOLD performance for
// small slices and small-sort performance sorting small sub-slices as part of
@@ -539,6 +543,16 @@ where
///
/// # Safety
/// begin < tail and p must be valid and initialized for all begin <= p <= tail.
+ #[requires(begin.addr() < tail.addr() && {
+     let len = tail.addr() - begin.addr() - 1;
+     let is_less: &mut F = unsafe { mem::transmute(&is_less) };
+     (0..len).into_iter().all(|i| is_less(&*begin.add(i), &*begin.add(i + 1)))
+ })]
+ #[ensures(|_| {
+     let len = tail.addr() - begin.addr();
+     let is_less: &mut F = unsafe { mem::transmute(&is_less) };
+     (0..len).into_iter().all(|i| is_less(&*begin.add(i), &*begin.add(i + 1)))
+ })]
unsafe fn insert_tail<T, F: FnMut(&T, &T) -> bool>(begin: *mut T, tail: *mut T, is_less: &mut F) {
    // SAFETY: see individual comments.
    unsafe {
@@ -556,7 +570,13 @@ unsafe fn insert_tail<T, F: FnMut(&T, &T) -> bool>(begin: *mut T, tail: *mut T,
        let tmp = ManuallyDrop::new(tail.read());
        let mut gap_guard = CopyOnDrop { src: &*tmp, dst: tail, len: 1 };

-         loop {
+         #[safety::loop_invariant(
+             sift.addr() >= begin.addr() && sift.addr() < tail.addr()
+         )]
+         // FIXME: This was `loop`, but Kani's loop contracts do not yet support `loop`.
+         // Once they do, replace `while true` with the original `loop`.
+         #[allow(while_true)]
+         while true {
            // SAFETY: we move sift into the gap (which is valid), and point the
            // gap guard destination at sift, ensuring that if a panic occurs the
            // gap is once again filled.
@@ -577,6 +597,14 @@ unsafe fn insert_tail<T, F: FnMut(&T, &T) -> bool>(begin: *mut T, tail: *mut T,
}

/// Sort `v` assuming `v[..offset]` is already sorted.
+ #[requires(offset != 0 && offset <= v.len() && {
+     let is_less: &mut F = unsafe { mem::transmute(&is_less) };
+     v[..offset].is_sorted_by(|a, b| is_less(a, b))
+ })]
+ #[ensures(|_| {
+     let is_less: &mut F = unsafe { mem::transmute(&is_less) };
+     v.is_sorted_by(|a, b| is_less(a, b))
+ })]
pub fn insertion_sort_shift_left<T, F: FnMut(&T, &T) -> bool>(
    v: &mut [T],
    offset: usize,
@@ -596,6 +624,9 @@ pub fn insertion_sort_shift_left<T, F: FnMut(&T, &T) -> bool>(
        let v_base = v.as_mut_ptr();
        let v_end = v_base.add(len);
        let mut tail = v_base.add(offset);
+         #[safety::loop_invariant(
+             tail.addr() > v_base.addr() && tail.addr() <= v_end.addr()
+         )]
        while tail != v_end {
            // SAFETY: v_base and tail are both valid pointers to elements, and
            // v_base < tail since we checked offset != 0.
@@ -870,3 +901,57 @@ pub(crate) const fn has_efficient_in_place_swap<T>() -> bool {
    // Heuristic that holds true on all tested 64-bit capable architectures.
    mem::size_of::<T>() <= 8 // mem::size_of::<u64>()
}
+
+ #[cfg(kani)]
+ #[unstable(feature = "kani", issue = "none")]
+ mod verify {
+     use super::*;
+
+     const MAX_SLICE_LEN: usize = SMALL_SORT_GENERAL_THRESHOLD;
+
+     #[kani::proof]
+     pub fn check_swap_if_less() {
+         let mut array: [u8; MAX_SLICE_LEN] = kani::any();
+         let a_pos = kani::any_where(|x: &usize| *x < MAX_SLICE_LEN);
+         let b_pos = kani::any_where(|x: &usize| *x < MAX_SLICE_LEN);
+         let mut is_less = |x: &u8, y: &u8| x < y;
+         let expected = {
+             let mut array = array.clone();
+             let a: u8 = array[a_pos];
+             let b: u8 = array[b_pos];
+             if is_less(&b, &a) {
+                 array[a_pos] = b;
+                 array[b_pos] = a;
+             }
+             array
+         };
+         unsafe {
+             swap_if_less(array.as_mut_ptr(), a_pos, b_pos, &mut is_less);
+         }
+         kani::assert(array == expected, "Swapped slice is different from the expectation");
+     }
+
+     #[kani::proof_for_contract(insert_tail)]
+     #[kani::unwind(17)]
+     pub fn check_insert_tail() {
+         let mut array: [u8; 17] = kani::any();
+         let tail = kani::any_where(|x: &usize| *x < 17);
+         let mut is_less = |x: &u8, y: &u8| x < y;
+         unsafe {
+             let begin = array.as_mut_ptr();
+             let tail = begin.add(tail);
+             insert_tail(begin, tail, &mut is_less);
+         }
+     }
+
+     #[kani::proof_for_contract(insertion_sort_shift_left)]
+     #[kani::stub_verified(insert_tail)]
+     #[kani::unwind(17)]
+     pub fn check_insertion_sort_shift_left() {
+         let mut array: [u8; 17] = kani::any();
+         let slice_len = kani::any_where(|x: &usize| *x != 0 && *x <= 17);
+         let offset = kani::any();
+         let mut is_less = |x: &u8, y: &u8| x < y;
+         insertion_sort_shift_left(&mut array[..slice_len], offset, &mut is_less);
+     }
+ }
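
For readers unfamiliar with the machinery above, here is a minimal, self-contained sketch of how a Kani function contract and its `proof_for_contract` harness fit together. It is not part of this change: it uses the plain `kani::requires`/`kani::ensures` attributes rather than the `safety` wrapper macros, and `read_at`/`check_read_at` are hypothetical names chosen for illustration.

#[cfg(kani)]
mod contract_sketch {
    // `requires` restricts the inputs the prover considers; `ensures` must
    // hold on every return path of the function.
    #[kani::requires(idx < buf.len())]
    #[kani::ensures(|result| *result == buf[idx])]
    fn read_at(buf: &[u8; 8], idx: usize) -> u8 {
        buf[idx]
    }

    // When verifying the contract, Kani assumes the `requires` clause for
    // the nondeterministic inputs and checks the `ensures` clause on return.
    #[kani::proof_for_contract(read_at)]
    fn check_read_at() {
        let buf: [u8; 8] = kani::any();
        let idx: usize = kani::any();
        let _ = read_at(&buf, idx);
    }
}

A harness in this shape is typically run with `cargo kani --harness check_read_at`. The `check_insert_tail` and `check_insertion_sort_shift_left` harnesses above rely on the same mechanism, with `#[kani::stub_verified(insert_tail)]` additionally letting the second proof reuse the already-verified contract of `insert_tail` instead of re-analyzing its body.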