@@ -7,7 +7,7 @@ use std::{
77 cmp:: { min, PartialEq } ,
88 fmt:: { self , Debug } ,
99 hash:: { Hash , Hasher } ,
10- iter:: FromIterator ,
10+ iter:: { DoubleEndedIterator , FromIterator } ,
1111 mem:: { replace, size_of} ,
1212 ops:: {
1313 Bound :: { Excluded , Included , Unbounded } ,
@@ -1101,6 +1101,10 @@ impl<'a, T: Debug + PrimInt> IntoIterator for &'a Vob<T> {
11011101 }
11021102}
11031103
1104+ // In all of the iterators below, we try to optimise what we expect to be the common cases (all
1105+ // bits set or all bits unset) with the general case (random bit patterns). We assume that
1106+ // `leading_zeros` and `trailing_zeros` have efficient implementations on most CPUs.
1107+
11041108#[ derive( Clone ) ]
11051109pub struct IterSetBits < ' a , T : ' a > {
11061110 vob : & ' a Vob < T > ,
@@ -1113,24 +1117,15 @@ impl<T: Debug + PrimInt> Iterator for IterSetBits<'_, T> {
11131117 fn next ( & mut self ) -> Option < usize > {
11141118 debug_assert ! ( self . range. end <= self . vob. len) ;
11151119 if let Some ( i) = self . range . next ( ) {
1116- // This is a long, but fairly fast, way of finding out what the next set bit is. The
1117- // basic problem is that we have no idea where the next set bit is -- but different
1118- // patterns of set bits are most efficiently handled by different code paths. This code
1119- // is thus a compromise: we prioritise two special cases (all bits set; all bits unset)
1120- // for efficiency, and try and make the other possible cases reasonably fast.
11211120 let mut b = block_offset :: < T > ( i) ;
11221121 let mut v = self . vob . vec [ b] ;
1123- // If all bits are set, we don't need to do any complicated checks.
11241122 if v == T :: max_value ( ) {
11251123 return Some ( i) ;
11261124 }
1127- // At this point we've got a block which might or might not have some bits set. We now
1128- // fall back to the general case.
11291125 let mut i_off = i % bits_per_block :: < T > ( ) ;
11301126 loop {
11311127 let tz = ( v >> i_off) . trailing_zeros ( ) as usize ;
11321128 if tz < bits_per_block :: < T > ( ) {
1133- // There is a bit set after i_off in the block.
11341129 let bs = b * bits_per_block :: < T > ( ) + i_off + tz;
11351130 self . range . start = bs + 1 ;
11361131 if bs >= self . range . end {
@@ -1140,7 +1135,6 @@ impl<T: Debug + PrimInt> Iterator for IterSetBits<'_, T> {
11401135 }
11411136 b += 1 ;
11421137 if b == blocks_required :: < T > ( self . range . end ) {
1143- // We've exhausted the iterator.
11441138 self . range . start = self . range . end ;
11451139 break ;
11461140 }
@@ -1156,6 +1150,38 @@ impl<T: Debug + PrimInt> Iterator for IterSetBits<'_, T> {
11561150 }
11571151}
11581152
1153+ impl < T : Debug + PrimInt > DoubleEndedIterator for IterSetBits < ' _ , T > {
1154+ fn next_back ( & mut self ) -> Option < usize > {
1155+ if let Some ( i) = self . range . next_back ( ) {
1156+ let mut b = block_offset :: < T > ( i) ;
1157+ let mut v = self . vob . vec [ b] ;
1158+ if v == T :: max_value ( ) {
1159+ return Some ( i) ;
1160+ }
1161+ let mut i_off = i % bits_per_block :: < T > ( ) ;
1162+ loop {
1163+ let lz = ( v << ( bits_per_block :: < T > ( ) - 1 - i_off) ) . leading_zeros ( ) as usize ;
1164+ if lz < bits_per_block :: < T > ( ) {
1165+ let bs = b * bits_per_block :: < T > ( ) + i_off - lz;
1166+ self . range . end = bs;
1167+ if bs < self . range . start {
1168+ break ;
1169+ }
1170+ return Some ( bs) ;
1171+ }
1172+ if b == block_offset :: < T > ( self . range . start ) {
1173+ self . range . start = self . range . end ;
1174+ break ;
1175+ }
1176+ b -= 1 ;
1177+ v = self . vob . vec [ b] ;
1178+ i_off = bits_per_block :: < T > ( ) - 1 ;
1179+ }
1180+ }
1181+ None
1182+ }
1183+ }
1184+
11591185#[ derive( Clone ) ]
11601186pub struct IterUnsetBits < ' a , T : ' a > {
11611187 vob : & ' a Vob < T > ,
@@ -1168,19 +1194,11 @@ impl<T: Debug + PrimInt> Iterator for IterUnsetBits<'_, T> {
11681194 fn next ( & mut self ) -> Option < usize > {
11691195 debug_assert ! ( self . range. end <= self . vob. len) ;
11701196 if let Some ( i) = self . range . next ( ) {
1171- // This is a long, but fairly fast, way of finding out what the next usset bit is. The
1172- // basic problem is that we have no idea where the next set bit is -- but different
1173- // patterns of set bits are most efficiently handled by different code paths. This code
1174- // is thus a compromise: we prioritise two special cases (all bits set; all bits unset)
1175- // for efficiency, and try and make the other possible cases reasonably fast.
11761197 let mut b = block_offset :: < T > ( i) ;
11771198 let mut v = self . vob . vec [ b] ;
1178- // If no bits are set, we don't need to do any complicated checks.
11791199 if v == T :: zero ( ) {
11801200 return Some ( i) ;
11811201 }
1182- // At this point we've got a block which might or might not have some bits unset. We
1183- // now fall back to the general case.
11841202 let mut i_off = i % bits_per_block :: < T > ( ) ;
11851203 loop {
11861204 let tz = ( !v >> i_off) . trailing_zeros ( ) as usize ;
@@ -1189,15 +1207,12 @@ impl<T: Debug + PrimInt> Iterator for IterUnsetBits<'_, T> {
11891207 let bs = b * bits_per_block :: < T > ( ) + i_off + tz;
11901208 self . range . start = bs + 1 ;
11911209 if bs >= self . range . end {
1192- // The unset bit is after the range we're looking for, so we've reached
1193- // the end of the iterator.
11941210 break ;
11951211 }
11961212 return Some ( bs) ;
11971213 }
11981214 b += 1 ;
11991215 if b == blocks_required :: < T > ( self . range . end ) {
1200- // We've exhausted the iterator.
12011216 self . range . start = self . range . end ;
12021217 break ;
12031218 }
@@ -1542,6 +1557,7 @@ mod tests {
15421557 hash:: { Hash , Hasher } ,
15431558 iter:: FromIterator ,
15441559 mem:: size_of,
1560+ ops:: RangeBounds ,
15451561 } ;
15461562
15471563 #[ test]
@@ -1791,28 +1807,38 @@ mod tests {
17911807
#[test]
fn test_iter_set_bits() {
    // Assert that iterating `range` over `v` yields `expected` in forward
    // order and, via the `DoubleEndedIterator` impl, the same values reversed.
    fn t<R>(v: &Vob, range: R, expected: Vec<usize>)
    where
        R: Clone + RangeBounds<usize>,
    {
        let rev: Vec<usize> = expected.iter().rev().copied().collect();
        assert_eq!(
            v.iter_set_bits(range.clone()).collect::<Vec<usize>>(),
            expected
        );
        assert_eq!(v.iter_set_bits(range).rev().collect::<Vec<usize>>(), rev);
    }

    // Degenerate and all-set cases (131 deliberately crosses a block boundary).
    t(&vob![], .., vec![]);
    t(&Vob::from_elem(true, 131), .., (0..131).collect::<Vec<_>>());

    let mut v1 = vob![false, true, false, true];
    t(&v1, .., vec![1, 3]);
    // Place set bits around the 128-bit block boundary and at the very end.
    v1.resize(127, false);
    v1.push(true);
    v1.push(false);
    v1.push(true);
    v1.push(true);
    v1.resize(256, false);
    v1.push(true);
    assert_eq!(v1.len(), 257);
    t(&v1, .., vec![1, 3, 127, 129, 130, 256]);
    t(&v1, 2..257, vec![3, 127, 129, 130, 256]);
    t(&v1, 2..256, vec![3, 127, 129, 130]);
    t(&v1, 2.., vec![3, 127, 129, 130, 256]);
    t(&v1, 1..255, vec![1, 3, 127, 129, 130]);
    t(&v1, ..3, vec![1]);
    t(&v1, 128.., vec![129, 130, 256]);
}
18171843
18181844 #[ test]
0 commit comments