use crate::{BST_BITS, BitSliceType, INLINE_SLICE_PARTS, SmolBitSet};

use core::iter;
-use core::ops::{BitAnd, BitAndAssign, BitOr, BitOrAssign, BitXor, BitXorAssign};
+use core::ops::{BitAnd, BitAndAssign, BitOr, BitOrAssign, BitXor, BitXorAssign, Not};
+
+macro_rules! shortening_bitop_fn_body {
+    ($lhs:ident, $rhs:ident, $opa:path) => {
+        match ($lhs.is_inline(), $rhs.is_inline()) {
+            (true, _) => unsafe {
+                let mut lhs = $lhs.get_inline_data_unchecked();
+                let rhs = $rhs.get_inlineable_start();
+                $opa(&mut lhs, rhs);
+                $lhs.write_inline_data_unchecked(lhs);
+            },
+            (false, false) => {
+                let lhs = unsafe { $lhs.as_slice_mut_unchecked() };
+                let rhs = unsafe { $rhs.as_slice_unchecked() };
+
+                // in case lhs > rhs we need to have extra elements
+                let rhs_iter = rhs.iter().chain(iter::repeat(&0));
+
+                for (lhs, rhs) in lhs.iter_mut().zip(rhs_iter) {
+                    $opa(lhs, *rhs);
+                }
+            }
+            (false, true) => {
+                let lhs = unsafe { $lhs.as_slice_mut_unchecked() };
+                let rhs = unsafe { $rhs.get_inline_data_unchecked() };
+
+                lhs.iter_mut().enumerate().for_each(|(idx, lhs)| {
+                    $opa(
+                        lhs,
+                        rhs.checked_shr((idx * BST_BITS) as u32).unwrap_or(0) as BitSliceType,
+                    );
+                });
+            }
+        }
+    };
+}
+
+macro_rules! extending_bitop_fn_body {
+    ($lhs:ident, $rhs:ident, $opa:path) => {
+        match ($lhs.is_inline(), $rhs.is_inline()) {
+            (true, true) => unsafe {
+                let mut lhs = $lhs.get_inline_data_unchecked();
+                let rhs = $rhs.get_inline_data_unchecked();
+                $opa(&mut lhs, rhs);
+                $lhs.write_inline_data_unchecked(lhs);
+            },
+            (_, false) => {
+                let rhs_hb = $rhs.highest_set_bit();
+                if rhs_hb > $lhs.highest_set_bit() {
+                    $lhs.ensure_capacity(rhs_hb);
+                }
+
+                let lhs = unsafe { $lhs.as_slice_mut_unchecked() };
+                let rhs = unsafe { $rhs.as_slice_unchecked() };
+
+                assert!(lhs.len() >= rhs.len());
+
+                // in case lhs > rhs we need to have extra elements
+                let rhs_iter = rhs.iter().chain(iter::repeat(&0));
+
+                for (lhs, rhs) in lhs.iter_mut().zip(rhs_iter) {
+                    $opa(lhs, *rhs);
+                }
+            }
+            (false, true) => {
+                let lhs = unsafe { $lhs.as_slice_mut_unchecked() };
+                let rhs = unsafe { $rhs.get_inline_data_unchecked() };
+
+                lhs.iter_mut()
+                    .enumerate()
+                    .take(INLINE_SLICE_PARTS)
+                    .for_each(|(idx, lhs)| {
+                        $opa(lhs, (rhs >> (idx * BST_BITS)) as BitSliceType);
+                    });
+            }
+        }
+    };
+}

macro_rules! impl_bitop {
-    ($($OP:ident::$op:ident, $OPA:ident::$opa:ident),+) => { $(
+    ($($OP:ident::$op:ident, $OPA:ident::$opa:ident, $body_macro:path;)+) => { $(
        impl $OP<Self> for SmolBitSet {
            type Output = Self;

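Both new macro bodies pad the shorter right-hand operand with zero words via `iter::repeat(&0)`, so every `lhs` word still receives an operand. A minimal sketch of that zip-with-padding pattern on plain `u8` slices rather than the SmolBitSet internals:

```rust
use core::iter;

fn main() {
    // lhs has more words than rhs; the missing rhs words are treated as 0.
    let mut lhs = [0b1100u8, 0b0011, 0b1111];
    let rhs = [0b1010u8];

    let rhs_iter = rhs.iter().chain(iter::repeat(&0));
    for (l, r) in lhs.iter_mut().zip(rhs_iter) {
        *l &= *r; // stands in for the operation passed as `$opa`, here AND-assign
    }

    assert_eq!(lhs, [0b1000, 0, 0]);
}
```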
@@ -23,9 +100,6 @@ macro_rules! impl_bitop {
            }
        }

-        impl_bitop!(@ref $OP::$op, $OPA::$opa);
-    )* };
-    (@ref $OP:ident::$op:ident, $OPA:ident::$opa:ident) => {
        impl $OP<&Self> for SmolBitSet {
            type Output = Self;

@@ -39,55 +113,27 @@ macro_rules! impl_bitop {

        impl $OPA<&Self> for SmolBitSet {
            fn $opa(&mut self, rhs: &Self) {
-                match (self.is_inline(), rhs.is_inline()) {
-                    (true, true) => unsafe {
-                        let lhs = self.get_inline_data_unchecked();
-                        let rhs = rhs.get_inline_data_unchecked();
-                        self.write_inline_data_unchecked(lhs.$op(rhs));
-                    },
-                    (_, false) => {
-                        let rhs_hb = rhs.highest_set_bit();
-                        if rhs_hb > self.highest_set_bit() {
-                            self.ensure_capacity(rhs_hb);
-                        }
-
-                        let lhs = unsafe { self.as_slice_mut_unchecked() };
-                        let rhs = unsafe { rhs.as_slice_unchecked() };
-
-                        assert!(lhs.len() >= rhs.len());
-
-                        // in case lhs > rhs we need to have extra elements
-                        let rhs_iter = rhs.iter().chain(iter::repeat(&0));
-
-                        for (lhs, rhs) in lhs.iter_mut().zip(rhs_iter) {
-                            (*lhs).$opa(*rhs);
-                        }
-                    }
-                    (false, true) => {
-                        let lhs = unsafe { self.as_slice_mut_unchecked() };
-                        let rhs = unsafe { rhs.get_inline_data_unchecked() };
-
-                        lhs.iter_mut()
-                            .enumerate()
-                            .take(INLINE_SLICE_PARTS)
-                            .for_each(|(idx, lhs)| {
-                                (*lhs).$opa((rhs >> (idx * BST_BITS)) as BitSliceType);
-                            });
-                    }
+                fn op<T>(lhs: &mut T, rhs: T)
+                where
+                    T: $OPA<T>
+                {
+                    (*lhs).$opa(rhs);
                }
+
+                $body_macro!(self, rhs, op);
            }
        }
-    };
+    )* };
}

impl_bitop! {
-    BitOr::bitor, BitOrAssign::bitor_assign,
-    BitAnd::bitand, BitAndAssign::bitand_assign,
-    BitXor::bitxor, BitXorAssign::bitxor_assign
+    BitOr::bitor, BitOrAssign::bitor_assign, extending_bitop_fn_body;
+    BitAnd::bitand, BitAndAssign::bitand_assign, shortening_bitop_fn_body;
+    BitXor::bitxor, BitXorAssign::bitxor_assign, extending_bitop_fn_body;
}

macro_rules! impl_bitop_prim {
-    ($($OP:ident::$op:ident, $OPA:ident::$opa:ident, $t:ty),+) => { $(
+    ($($OP:ident::$op:ident, $OPA:ident::$opa:ident, $t:ty;)+) => { $(
        impl $OP<$t> for SmolBitSet {
            type Output = Self;

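The pairing above reflects which operations can grow the set: `BitOr` and `BitXor` can set bits above `self`'s current highest word, so they use `extending_bitop_fn_body` (which calls `ensure_capacity`), while `BitAnd` can never set a bit where `self` has none, so `shortening_bitop_fn_body` skips the growth step. A plain-integer sketch of that invariant, with zero-extension standing in for the missing high words:

```rust
fn main() {
    // Model a short lhs as zero-extended to the rhs width.
    let lhs_short: u8 = 0b1010;
    let rhs_wide: u16 = 0b1_0000_0000; // has a bit above lhs's width

    // OR (and likewise XOR) can set high bits, so the result needs the wider width.
    assert_eq!(u16::from(lhs_short) | rhs_wide, 0b1_0000_1010);

    // AND against the implicit zero words can never set high bits, so no growth is needed.
    assert_eq!(u16::from(lhs_short) & rhs_wide, 0);
}
```

The same reasoning is why `and_not_assign` further down can reuse the shortening body.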
@@ -106,9 +152,6 @@ macro_rules! impl_bitop_prim {
            }
        }

-        impl_bitop_prim!(@ref $OP::$op, $OPA::$opa, $t);
-    )* };
-    (@ref $OP:ident::$op:ident, $OPA:ident::$opa:ident, $t:ty) => {
        impl $OP<&$t> for SmolBitSet {
            type Output = Self;

@@ -124,14 +167,38 @@ macro_rules! impl_bitop_prim {
                self.$opa(*rhs)
            }
        }
-    };
+    )* };
    ($($t:ty),+) => { $(
        impl_bitop_prim!{
-            BitOr::bitor, BitOrAssign::bitor_assign, $t,
-            BitAnd::bitand, BitAndAssign::bitand_assign, $t,
-            BitXor::bitxor, BitXorAssign::bitxor_assign, $t
+            BitOr::bitor, BitOrAssign::bitor_assign, $t;
+            BitAnd::bitand, BitAndAssign::bitand_assign, $t;
+            BitXor::bitxor, BitXorAssign::bitxor_assign, $t;
        }
    )* };
}

impl_bitop_prim!(u8, u16, u32, u64, u128, usize);
+
+impl SmolBitSet {
+    /// Unsets all bits in `self` that are set in the `rhs` [`SmolBitSet`].
+    ///
+    /// This is equivalent to `self & !rhs` with integers.
+    pub fn and_not(mut self, rhs: &Self) -> Self {
+        self.and_not_assign(rhs);
+        self
+    }
+
+    /// Unsets all bits in `self` that are set in the `rhs` [`SmolBitSet`].
+    ///
+    /// This is equivalent to `*self &= !rhs` with integers.
+    pub fn and_not_assign(&mut self, rhs: &Self) {
+        fn op<T>(lhs: &mut T, rhs: T)
+        where
+            T: BitAndAssign + Not<Output = T>,
+        {
+            *lhs &= !rhs;
+        }
+
+        shortening_bitop_fn_body!(self, rhs, op);
+    }
+}
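The new `and_not` helpers are documented as the set analogue of `self & !rhs`; a small plain-integer illustration of that semantics (not the SmolBitSet API):

```rust
fn main() {
    let lhs: u32 = 0b1011; // bits 0, 1, 3 set
    let rhs: u32 = 0b0110; // bits 1, 2 set

    // and_not clears every bit of lhs that is set in rhs.
    assert_eq!(lhs & !rhs, 0b1001); // bits 0 and 3 remain
}
```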