@@ -71,6 +71,10 @@ func andSparseWithSparseBitArray(sba, other *sparseBitArray) BitArray {
 }
 
 func andSparseWithDenseBitArray(sba *sparseBitArray, other *bitArray) BitArray {
+	if other.IsEmpty() {
+		return newSparseBitArray()
+	}
+
 	// Use a duplicate of the sparse array to store the results of the
 	// bitwise and. More memory-efficient than allocating a new dense bit
 	// array.
@@ -83,14 +87,14 @@ func andSparseWithDenseBitArray(sba *sparseBitArray, other *bitArray) BitArray {
 
 	// Run through the sparse array and attempt comparisons wherever
 	// possible against the dense bit array.
-	for selfIndex, selfValue := range sba.indices {
+	for selfIndex, selfValue := range ba.indices {
 
 		if selfValue >= uint64(len(other.blocks)) {
 			// The dense bit array has been exhausted. This is the
 			// annoying case because we have to trim the sparse
 			// array to the size of the dense array.
-			ba.blocks = ba.blocks[:selfIndex]
-			ba.indices = ba.indices[:selfIndex]
+			ba.blocks = ba.blocks[:selfIndex-1]
+			ba.indices = ba.indices[:selfIndex-1]
 
 			// once this is done, there are no more comparisons.
 			// We're ready to return
@@ -99,9 +103,15 @@ func andSparseWithDenseBitArray(sba *sparseBitArray, other *bitArray) BitArray {
 		ba.blocks[selfIndex] = ba.blocks[selfIndex].and(
 			other.blocks[selfValue])
 
-		if ba.blocks[selfIndex] == 0 {
-			ba.blocks.deleteAtIndex(int64(selfIndex))
-			ba.indices.deleteAtIndex(int64(selfIndex))
+	}
+
+	// Ensure any zero'd blocks in the resulting sparse
+	// array are deleted
+	for i := 0; i < len(ba.blocks); i++ {
+		if ba.blocks[i] == 0 {
+			ba.blocks.deleteAtIndex(int64(i))
+			ba.indices.deleteAtIndex(int64(i))
+			i--
 		}
 	}
 
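For context, the technique in this commit can be sketched as standalone Go. The sparse struct and andSparseWithDense function below are hypothetical stand-ins for the package's internal sparseBitArray and bitArray types, not its real API. The sketch follows the same steps: work on a copy of the sparse input, AND each sparse block against the dense block at its position, trim whatever lies past the end of the dense array (here at the first out-of-range entry, a slightly different boundary than the diff above), and then delete any blocks that were zeroed, since a sparse array should not carry empty blocks.

package main

import "fmt"

// sparse is a hypothetical stand-in for the package's sparseBitArray:
// parallel slices of block positions and 64-bit blocks.
type sparse struct {
	indices []uint64
	blocks  []uint64
}

// andSparseWithDense ANDs a sparse representation against a dense slice of
// blocks: copy the sparse input, AND block-by-block, trim entries past the
// end of the dense array, then drop blocks that were zeroed.
func andSparseWithDense(s sparse, dense []uint64) sparse {
	// Work on a copy so the caller's sparse array is left untouched.
	out := sparse{
		indices: append([]uint64(nil), s.indices...),
		blocks:  append([]uint64(nil), s.blocks...),
	}

	for i, blockIndex := range out.indices {
		if blockIndex >= uint64(len(dense)) {
			// The dense array has no block here; this and every later
			// entry would AND to zero, so trim them away.
			out.indices = out.indices[:i]
			out.blocks = out.blocks[:i]
			break
		}
		out.blocks[i] &= dense[blockIndex]
	}

	// Delete blocks that became zero, keeping indices and blocks in sync.
	for i := 0; i < len(out.blocks); i++ {
		if out.blocks[i] == 0 {
			out.indices = append(out.indices[:i], out.indices[i+1:]...)
			out.blocks = append(out.blocks[:i], out.blocks[i+1:]...)
			i--
		}
	}
	return out
}

func main() {
	s := sparse{indices: []uint64{0, 2, 5}, blocks: []uint64{0b1010, 0b0110, 0b1111}}
	dense := []uint64{0b0010, 0b0000, 0b1001}
	fmt.Println(andSparseWithDense(s, dense)) // prints {[0] [2]}
}

In the example run, the block at position 2 ANDs to zero and the entry at position 5 falls past the dense array, so only the block at position 0 survives.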