@@ -15,9 +15,54 @@ impl<T: Clone> SpecFill<T> for [T] {
 }
 
 impl<T: Copy> SpecFill<T> for [T] {
-    fn spec_fill(&mut self, value: T) {
+    default fn spec_fill(&mut self, value: T) {
         for item in self.iter_mut() {
             *item = value;
         }
     }
 }
+
+impl SpecFill<u8> for [u8] {
+    fn spec_fill(&mut self, value: u8) {
+        // SAFETY: The pointer is derived from a reference, so it's writable.
+        unsafe {
+            crate::intrinsics::write_bytes(self.as_mut_ptr(), value, self.len());
+        }
+    }
+}
+
+impl SpecFill<i8> for [i8] {
+    fn spec_fill(&mut self, value: i8) {
+        // SAFETY: The pointer is derived from a reference, so it's writable.
+        unsafe {
+            crate::intrinsics::write_bytes(self.as_mut_ptr(), value.cast_unsigned(), self.len());
+        }
+    }
+}
+
+macro spec_fill_int {
+    ($($type:ty)*) => {$(
+        impl SpecFill<$type> for [$type] {
+            #[inline]
+            fn spec_fill(&mut self, value: $type) {
+                // We always take this fastpath in Miri for long slices as the manual `for`
+                // loop can be prohibitively slow.
+                if (cfg!(miri) && self.len() > 32) || crate::intrinsics::is_val_statically_known(value) {
+                    let bytes = value.to_ne_bytes();
+                    if value == <$type>::from_ne_bytes([bytes[0]; size_of::<$type>()]) {
+                        // SAFETY: The pointer is derived from a reference, so it's writable.
+                        unsafe {
+                            crate::intrinsics::write_bytes(self.as_mut_ptr(), bytes[0], self.len());
+                        }
+                        return;
+                    }
+                }
+                for item in self.iter_mut() {
+                    *item = value;
+                }
+            }
+        }
+    )*}
+}
+
+spec_fill_int! { u16 i16 u32 i32 u64 i64 u128 i128 usize isize }
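
For context, these impls specialize slice fill for integer element types: the fill can be lowered to a single memset-style `write_bytes` whenever the value is one byte repeated, which always holds for `u8`/`i8` and is checked for the wider types. In the patch that check is only attempted when `is_val_statically_known(value)` holds (or under Miri for long slices), so the comparison can fold away at compile time and other fills keep the plain loop. Below is a minimal standalone sketch of the repeated-byte check for one concrete type, using the stable `std::ptr::write_bytes` instead of the private `crate::intrinsics` calls; the function name `fill_u32` and the fixed type are illustrative only and not part of the patch.

use std::mem::size_of;
use std::ptr;

// Illustrative sketch only: the same repeated-byte check as in
// `spec_fill_int`, written for a single concrete type.
fn fill_u32(slice: &mut [u32], value: u32) {
    let bytes = value.to_ne_bytes();
    // `write_bytes` can only splat a single byte, so it applies exactly when
    // `value` is that byte repeated across the whole integer.
    if value == u32::from_ne_bytes([bytes[0]; size_of::<u32>()]) {
        // SAFETY: the pointer and length come from a valid mutable slice,
        // and every byte pattern is a valid `u32`.
        unsafe { ptr::write_bytes(slice.as_mut_ptr(), bytes[0], slice.len()) };
        return;
    }
    // Fallback: element-by-element loop, as in the generic `T: Copy` impl.
    for item in slice.iter_mut() {
        *item = value;
    }
}

fn main() {
    let mut a = [0u32; 8];
    fill_u32(&mut a, 0x7F7F_7F7F); // all bytes equal: takes the write_bytes path
    assert!(a.iter().all(|&x| x == 0x7F7F_7F7F));

    let mut b = [0u32; 8];
    fill_u32(&mut b, 0x1234_5678); // bytes differ: falls back to the loop
    assert!(b.iter().all(|&x| x == 0x1234_5678));
}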