3
3
use crate :: mem:: { self , ManuallyDrop , MaybeUninit } ;
4
4
use crate :: slice:: sort:: shared:: FreezeMarker ;
5
5
use crate :: { intrinsics, ptr, slice} ;
6
+ use safety:: { ensures, requires} ;
7
+
8
+ #[ allow( unused_imports) ]
9
+ use crate :: ub_checks;
10
+
11
+ #[ cfg( kani) ]
12
+ use crate :: kani;
6
13
7
14
// It's important to differentiate between SMALL_SORT_THRESHOLD performance for
8
15
// small slices and small-sort performance sorting small sub-slices as part of
@@ -62,6 +69,26 @@ impl<T: FreezeMarker> StableSmallSortTypeImpl for T {
62
69
}
63
70
}
64
71
72
+ // // This wrapper contract function exists because;
73
+ // // - kani contract attribute macros don't work with `default fn`
74
+ // // - we cannot specify the trait member function with `proof_for_contract`
75
+ // #[cfg(kani)]
76
+ // #[kani::modifies(v)]
77
+ // #[ensures(|_| {
78
+ // let mut is_less = is_less.clone();
79
+ // v.is_sorted_by(|a, b| !is_less(b, a))
80
+ // })]
81
+ // pub(crate) fn _stable_small_sort_type_impl_small_sort<T, F>(
82
+ // v: &mut [T],
83
+ // scratch: &mut [MaybeUninit<T>],
84
+ // is_less: &mut F,
85
+ // )
86
+ // where
87
+ // F: FnMut(&T, &T) -> bool + Clone,
88
+ // {
89
+ // <T as StableSmallSortTypeImpl>::small_sort(v, scratch, is_less);
90
+ // }
91
+
65
92
/// Using a trait allows us to specialize on `Freeze` which in turn allows us to make safe
66
93
/// abstractions.
67
94
pub ( crate ) trait UnstableSmallSortTypeImpl : Sized {
@@ -102,6 +129,25 @@ impl<T: FreezeMarker> UnstableSmallSortTypeImpl for T {
102
129
}
103
130
}
104
131
132
+ // // This wrapper contract function exists because;
133
+ // // - kani contract attribute macros don't work with `default fn`
134
+ // // - we cannot specify the trait member function with `proof_for_contract`
135
+ // #[cfg(kani)]
136
+ // #[kani::modifies(v)]
137
+ // #[ensures(|_| {
138
+ // let mut is_less = is_less.clone();
139
+ // v.is_sorted_by(|a, b| !is_less(b, a))
140
+ // })]
141
+ // pub(crate) fn _unstable_small_sort_type_impl_small_sort<T, F>(
142
+ // v: &mut [T],
143
+ // is_less: &mut F,
144
+ // )
145
+ // where
146
+ // F: FnMut(&T, &T) -> bool + Clone,
147
+ // {
148
+ // <T as UnstableSmallSortTypeImpl>::small_sort(v, is_less);
149
+ // }
150
+
105
151
/// FIXME(const_trait_impl) use original ipnsort approach with choose_unstable_small_sort,
106
152
/// as found here <https://github.com/Voultapher/sort-research-rs/blob/438fad5d0495f65d4b72aa87f0b62fc96611dff3/ipnsort/src/smallsort.rs#L83C10-L83C36>.
107
153
pub ( crate ) trait UnstableSmallSortFreezeTypeImpl : Sized + FreezeMarker {
@@ -170,6 +216,26 @@ impl<T: FreezeMarker + CopyMarker> UnstableSmallSortFreezeTypeImpl for T {
170
216
}
171
217
}
172
218
219
+ // // This wrapper contract function exists because;
220
+ // // - kani contract attribute macros don't work with `default fn`
221
+ // // - we cannot specify the trait member function with `proof_for_contract`
222
+ // #[cfg(kani)]
223
+ // #[kani::modifies(v)]
224
+ // #[ensures(|_| {
225
+ // let mut is_less = is_less.clone();
226
+ // v.is_sorted_by(|a, b| !is_less(b, a))
227
+ // })]
228
+ // pub(crate) fn _unstable_small_sort_freeze_type_impl_small_sort<T, F>(
229
+ // v: &mut [T],
230
+ // is_less: &mut F,
231
+ // )
232
+ // where
233
+ // T: FreezeMarker + CopyMarker,
234
+ // F: FnMut(&T, &T) -> bool + Clone,
235
+ // {
236
+ // <T as UnstableSmallSortFreezeTypeImpl>::small_sort(v, is_less);
237
+ // }
238
+
173
239
/// Optimal number of comparisons, and good perf.
174
240
const SMALL_SORT_FALLBACK_THRESHOLD : usize = 16 ;
175
241
@@ -539,6 +605,27 @@ where
539
605
///
540
606
/// # Safety
541
607
/// begin < tail and p must be valid and initialized for all begin <= p <= tail.
608
+ #[ cfg_attr(
609
+ kani,
610
+ kani:: modifies(
611
+ ptr:: slice_from_raw_parts_mut( begin, tail. addr( ) . saturating_sub( begin. addr( ) - 1 ) )
612
+ )
613
+ ) ]
614
+ #[ requires( begin. addr( ) < tail. addr( ) && {
615
+ let len = tail. addr( ) - begin. addr( ) ;
616
+ let is_less: & mut F = unsafe { mem:: transmute( & is_less) } ;
617
+ ( 0 ..=len) . into_iter( ) . all( |i| {
618
+ let p = begin. add( i) ;
619
+ ub_checks:: can_dereference( p as * const _) &&
620
+ ub_checks:: can_write( p) &&
621
+ ub_checks:: same_allocation( begin as * const _, p as * const _)
622
+ } ) && ( 0 ..( len - 1 ) ) . into_iter( ) . all( |i| !is_less( & * begin. add( i + 1 ) , & * begin. add( i) ) )
623
+ } ) ]
624
+ #[ ensures( |_| {
625
+ let len = tail. addr( ) - begin. addr( ) ;
626
+ let is_less: & mut F = unsafe { mem:: transmute( & is_less) } ;
627
+ ( 0 ..len) . into_iter( ) . all( |i| !is_less( & * begin. add( i + 1 ) , & * begin. add( i) ) )
628
+ } ) ]
542
629
unsafe fn insert_tail < T , F : FnMut ( & T , & T ) -> bool > ( begin : * mut T , tail : * mut T , is_less : & mut F ) {
543
630
// SAFETY: see individual comments.
544
631
unsafe {
@@ -556,7 +643,13 @@ unsafe fn insert_tail<T, F: FnMut(&T, &T) -> bool>(begin: *mut T, tail: *mut T,
556
643
let tmp = ManuallyDrop :: new ( tail. read ( ) ) ;
557
644
let mut gap_guard = CopyOnDrop { src : & * tmp, dst : tail, len : 1 } ;
558
645
559
- loop {
646
+ #[ safety:: loop_invariant(
647
+ sift. addr( ) >= begin. addr( ) && sift. addr( ) < tail. addr( )
648
+ ) ]
649
+ // FIXME: This was `loop` but kani's loop contract doesn't support `loop`.
650
+ // Once it is supported, replace `while true` with the original `loop`
651
+ #[ allow( while_true) ]
652
+ while true {
560
653
// SAFETY: we move sift into the gap (which is valid), and point the
561
654
// gap guard destination at sift, ensuring that if a panic occurs the
562
655
// gap is once again filled.
@@ -577,6 +670,15 @@ unsafe fn insert_tail<T, F: FnMut(&T, &T) -> bool>(begin: *mut T, tail: *mut T,
577
670
}
578
671
579
672
/// Sort `v` assuming `v[..offset]` is already sorted.
673
+ #[ cfg_attr( kani, kani:: modifies( v) ) ]
674
+ #[ requires( offset != 0 && offset <= v. len( ) && {
675
+ let is_less: & mut F = unsafe { mem:: transmute( & is_less) } ;
676
+ v[ ..offset] . is_sorted_by( |a, b| !is_less( b, a) )
677
+ } ) ]
678
+ #[ ensures( |_| {
679
+ let is_less: & mut F = unsafe { mem:: transmute( & is_less) } ;
680
+ v. is_sorted_by( |a, b| !is_less( b, a) )
681
+ } ) ]
580
682
pub fn insertion_sort_shift_left < T , F : FnMut ( & T , & T ) -> bool > (
581
683
v : & mut [ T ] ,
582
684
offset : usize ,
@@ -597,6 +699,14 @@ pub fn insertion_sort_shift_left<T, F: FnMut(&T, &T) -> bool>(
597
699
let v_end = v_base. add ( len) ;
598
700
let mut tail = v_base. add ( offset) ;
599
701
while tail != v_end {
702
+ // FIXME: This should be loop contract but sadly, making this into
703
+ // loop invariant causes OOM
704
+ #[ cfg( kani) ]
705
+ kani:: assert (
706
+ tail. addr ( ) > v_base. addr ( ) && tail. addr ( ) <= v_end. addr ( ) ,
707
+ "loop invariants" ,
708
+ ) ;
709
+
600
710
// SAFETY: v_base and tail are both valid pointers to elements, and
601
711
// v_base < tail since we checked offset != 0.
602
712
insert_tail ( v_base, tail, is_less) ;
@@ -609,6 +719,28 @@ pub fn insertion_sort_shift_left<T, F: FnMut(&T, &T) -> bool>(
609
719
610
720
/// SAFETY: The caller MUST guarantee that `v_base` is valid for 4 reads and
611
721
/// `dst` is valid for 4 writes. The result will be stored in `dst[0..4]`.
722
+ #[ cfg_attr( kani, kani:: modifies( dst, dst. add( 1 ) , dst. add( 2 ) , dst. add( 3 ) ) ) ]
723
+ #[ requires(
724
+ ( 0 ..4 ) . into_iter( ) . all( |i| {
725
+ let p = v_base. add( i) ;
726
+ ub_checks:: can_dereference( p) &&
727
+ ub_checks:: same_allocation( v_base, p)
728
+ } )
729
+ ) ]
730
+ #[ requires(
731
+ ( 0 ..4 ) . into_iter( ) . all( |i| {
732
+ let p = dst. add( i) ;
733
+ ub_checks:: can_write( p) &&
734
+ ub_checks:: same_allocation( dst, p)
735
+ } )
736
+ ) ]
737
+ #[ ensures( |_| {
738
+ let is_less: & mut F = unsafe { mem:: transmute( & is_less) } ;
739
+ ( 0 ..3 ) . into_iter( ) . all( |i| !is_less(
740
+ & * dst. add( i + 1 ) ,
741
+ & * dst. add( i) ,
742
+ ) )
743
+ } ) ]
612
744
pub unsafe fn sort4_stable < T , F : FnMut ( & T , & T ) -> bool > (
613
745
v_base : * const T ,
614
746
dst : * mut T ,
@@ -870,3 +1002,136 @@ pub(crate) const fn has_efficient_in_place_swap<T>() -> bool {
870
1002
// Heuristic that holds true on all tested 64-bit capable architectures.
871
1003
mem:: size_of :: < T > ( ) <= 8 // mem::size_of::<u64>()
872
1004
}
1005
+
1006
#[cfg(kani)]
#[unstable(feature = "kani", issue = "none")]
mod verify {
    use super::*;

    // Upper bound on the length of the slice that `insertion_sort_shift_left`
    // is invoked with. The value comes from the following line;
    // https://github.com/model-checking/verify-rust-std/blob/1a38674ad6753e3a78e0181d1fe613f3b25ebacd/library/core/src/slice/sort/shared/smallsort.rs#L330
    const INSERTION_SORT_MAX_LEN: usize = 17;

    #[kani::proof]
    pub fn check_swap_if_less() {
        let mut values: [u8; SMALL_SORT_GENERAL_THRESHOLD] = kani::any();
        let lhs = kani::any_where(|x: &usize| *x < SMALL_SORT_GENERAL_THRESHOLD);
        let rhs = kani::any_where(|x: &usize| *x < SMALL_SORT_GENERAL_THRESHOLD);
        let mut is_less = |x: &u8, y: &u8| x < y;

        // Model the conditional swap by hand to obtain the expected result.
        let expected = {
            let mut model = values.clone();
            let (a, b) = (model[lhs], model[rhs]);
            if is_less(&b, &a) {
                model[lhs] = b;
                model[rhs] = a;
            }
            model
        };

        // SAFETY: `lhs` and `rhs` are both in bounds of `values`.
        unsafe {
            swap_if_less(values.as_mut_ptr(), lhs, rhs, &mut is_less);
        }

        kani::assert(
            values == expected,
            "swapped slice is different from the expectation"
        );
    }

    #[kani::proof_for_contract(insert_tail)]
    #[kani::unwind(17)]
    pub fn check_insert_tail() {
        let mut values: [u8; INSERTION_SORT_MAX_LEN] = kani::any();
        let tail_idx = kani::any_where(|x: &usize| *x < INSERTION_SORT_MAX_LEN);
        let mut is_less = |x: &u8, y: &u8| x < y;
        // SAFETY: `tail` stays inside `values`; the remaining preconditions are
        // the contract's `requires` clauses, assumed by `proof_for_contract`.
        unsafe {
            let begin = values.as_mut_ptr();
            insert_tail(begin, begin.add(tail_idx), &mut is_less);
        }
    }

    // FIXME: Ideally, this would be `proof_for_contract(insertion_sort_shift_left)`,
    // but that fails with OOM due to `proof_for_contract`'s perf issue.
    //
    // See https://github.com/model-checking/kani/issues/3797
    #[kani::proof]
    #[kani::stub_verified(insert_tail)]
    #[kani::unwind(17)]
    pub fn check_insertion_sort_shift_left() {
        let len =
            kani::any_where(|x: &usize| *x != 0 && *x <= INSERTION_SORT_MAX_LEN);
        let offset = kani::any_where(|x: &usize| *x != 0 && *x <= len);
        let mut is_less = |x: &u8, y: &u8| x < y;
        // Constrain the prefix `[..offset]` to already be sorted, as the
        // function's documented precondition demands.
        let mut values = kani::any_where(|x: &[u8; INSERTION_SORT_MAX_LEN]| {
            x[..offset].is_sorted_by(|a, b| !is_less(b, a))
        });
        insertion_sort_shift_left(&mut values[..len], offset, &mut is_less);
        kani::assert(
            values[..len].is_sorted_by(|a, b| !is_less(b, a)),
            "slice is not sorted",
        );
    }

    #[kani::proof_for_contract(sort4_stable)]
    pub fn check_sort4_stable() {
        let src: [u8; 4] = kani::any();
        let mut dst = MaybeUninit::<[u8; 4]>::uninit();
        let mut is_less = |x: &u8, y: &u8| x < y;
        // SAFETY: `src` is valid for 4 reads and `dst` for 4 writes.
        unsafe {
            sort4_stable(src.as_ptr(), dst.as_mut_ptr() as *mut _, &mut is_less);
        }
    }

    #[kani::proof]
    pub fn check_sort4_stable_stability() {
        // Tag every key with its original position so stability is observable
        // even for equal keys.
        let src: [(u8, u8); 4] = [
            (kani::any(), 0),
            (kani::any(), 1),
            (kani::any(), 2),
            (kani::any(), 3),
        ];
        let mut dst = MaybeUninit::<[(u8, u8); 4]>::uninit();
        let mut is_less = |x: &(u8, u8), y: &(u8, u8)| x.0 < y.0;
        // SAFETY: `src` is valid for 4 reads and `dst` for 4 writes.
        unsafe {
            sort4_stable(src.as_ptr(), dst.as_mut_ptr() as *mut _, &mut is_less);
        }
        // SAFETY: `sort4_stable` stores its result in `dst[0..4]`, so all four
        // elements are initialized.
        let dst = unsafe { dst.assume_init() };
        // A stable order additionally breaks key ties by original position.
        let mut is_stably_less = |x: &(u8, u8), y: &(u8, u8)| {
            if x.0 == y.0 { x.1 < y.1 } else { x.0 < y.0 }
        };
        kani::assert(
            dst.is_sorted_by(|a, b| !is_stably_less(b, a)),
            "slice is not stably sorted",
        );
    }

    #[kani::proof]
    pub fn check_has_efficient_in_place_swap() {
        // There is not much to verify, as the function just consults
        // `mem::size_of`. So, just bring over some tests written by the author
        // of smallsort, from Voultapher/sort-research-rs
        //
        // https://github.com/Voultapher/sort-research-rs/blob/c0cb46214a8d6e10ae5f4e0c363c2dbcbf71966f/ipnsort/src/smallsort.rs#L758-L771
        assert!(has_efficient_in_place_swap::<i32>());
        assert!(has_efficient_in_place_swap::<u64>());
        assert!(!has_efficient_in_place_swap::<u128>());
    }

    // NOTE(review): kept disabled together with the commented-out wrapper
    // `_stable_small_sort_type_impl_small_sort` it targets; re-enable both once
    // kani supports contracts on `default fn` trait members.
    // #[kani::proof_for_contract(_stable_small_sort_type_impl_small_sort)]
    // #[kani::stub_verified(insertion_sort_shift_left)]
    // pub fn check_stable_small_sort_type_impl_small_sort() {
    //     let mut array: [u8; SMALL_SORT_FALLBACK_THRESHOLD] = kani::any();
    //     let len = kani::any_where(|x: &usize| *x <= SMALL_SORT_FALLBACK_THRESHOLD);
    //     let mut scratch: [MaybeUninit<u8>; SMALL_SORT_GENERAL_SCRATCH_LEN]
    //         = [const { MaybeUninit::uninit() }; SMALL_SORT_GENERAL_SCRATCH_LEN];
    //     let mut is_less = |x: &u8, y: &u8| x < y;
    //     _stable_small_sort_type_impl_small_sort(&mut array[..len], &mut scratch, &mut is_less);
    // }
}
0 commit comments