@@ -158,6 +158,24 @@ public static ref T GetSurePresentItemRef<T>(this T[] items, int index)
158158#endif
159159 }
160160
/// <summary>Gets the item by-ref. On .NET 7+ the lookup goes through
/// <see cref="Unsafe.Add{T}(ref T, int)"/> from the provided first-element ref and skips the
/// bounds check entirely; on older targets it falls back to the array indexer, which still
/// performs the runtime bounds check. Caller is responsible for `index` being in range.</summary>
[MethodImpl((MethodImplOptions)256)] // 256 == AggressiveInlining; cast keeps old TFMs compiling
public static ref T GetItemRef<T>(
#if NET7_0_OR_GREATER
    this ref T first, int index) where T : struct => ref Unsafe.Add(ref first, index);
#else
    this T[] first, int index) where T : struct => ref first[index];
#endif
169+
/// <summary>Gets the item by value. On .NET 7+ the read goes through
/// <see cref="Unsafe.Add{T}(ref T, int)"/> and avoids the bounds check; the fallback uses the
/// bounds-checked array indexer. Caller is responsible for `index` being in range.</summary>
[MethodImpl((MethodImplOptions)256)] // 256 == AggressiveInlining; cast keeps old TFMs compiling
internal static T GetItem<T>(
#if NET7_0_OR_GREATER
    this ref T start, int index) where T : struct => Unsafe.Add(ref start, index);
#else
    // fix: `where T : struct` was missing here but present on the NET7 branch (and on GetItemRef),
    // so a class-typed call would compile on old TFMs and break on retarget to .NET 7+.
    this T[] start, int index) where T : struct => start[index];
#endif
178+
161179 // todo: @perf add the not null variant
162180 /// <summary>Appends the new default item to the list and returns ref to it for write or read</summary>
163181 [ MethodImpl ( ( MethodImplOptions ) 256 ) ]
@@ -881,19 +899,19 @@ public struct Entry<K> : IEntry<K>
/// <summary>Binary representation of the `int`, zero-padded to the full 32 bits.</summary>
public static string ToB(int x)
{
    // Convert.ToString(x, 2) drops leading zeros, so pad back out to 32 characters.
    var bits = System.Convert.ToString(x, 2);
    return bits.PadLeft(32, '0');
}
883901
884- [ MethodImpl ( ( MethodImplOptions ) 256 ) ]
885- #if NET7_0_OR_GREATER
886- internal static ref int NextHashRef ( ref int start , int distance ) => ref Unsafe . Add ( ref start , distance ) ;
887- #else
888- internal static ref int NextHashRef ( ref int [ ] start , int distance ) => ref start [ distance ] ;
889- #endif
902+ // [MethodImpl((MethodImplOptions)256)]
903+ // #if NET7_0_OR_GREATER
904+ // internal static ref int NextHashRef(ref int start, int distance) => ref Unsafe.Add(ref start, distance);
905+ // #else
906+ // internal static ref int NextHashRef(ref int[] start, int distance) => ref start[distance];
907+ // #endif
890908
891- [ MethodImpl ( ( MethodImplOptions ) 256 ) ]
892- #if NET7_0_OR_GREATER
893- internal static int NextHash ( ref int start , int distance ) => Unsafe . Add ( ref start , distance ) ;
894- #else
895- internal static int NextHash ( ref int [ ] start , int distance ) => start [ distance ] ;
896- #endif
909+ // [MethodImpl((MethodImplOptions)256)]
910+ // #if NET7_0_OR_GREATER
911+ // internal static int NextHash(ref int start, int distance) => Unsafe.Add(ref start, distance);
912+ // #else
913+ // internal static int NextHash(ref int[] start, int distance) => start[distance];
914+ // #endif
897915
898916 /// <summary>Abstraction to configure your own entries data structure. Check the derived types for the examples</summary>
899917 public interface IEntries < K , TEntry , TEq >
@@ -947,18 +965,20 @@ public ref TEntry AddKeyAndGetEntryRef(K key, int index)
947965
948966 /// <summary>Lookup for the K in the TStackEntries, first by calculating it hash with TEq and searching the hash in the TStackHashes</summary>
949967 public static ref TEntry TryGetEntryRef < K , TEntry , TEq , TCap , TStackHashes , TStackEntries > (
950- this ref TStackEntries entries , ref TStackHashes hashes , K key , out bool found ,
968+ this ref TStackEntries entries , ref TStackHashes hashes , int count , K key , out bool found ,
951969 TEq eq = default , TCap cap = default , Use < TEntry > _ = default )
952970 where TEntry : struct , IEntry < K >
953971 where TEq : struct , IEq < K >
954972 where TStackHashes : struct , IStack < int , TCap , TStackHashes >
955973 where TStackEntries : struct , IStack < TEntry , TCap , TStackEntries >
956974 where TCap : struct , ISize2Plus
957975 {
976+ Debug . Assert ( count <= cap . Size , $ "SmallMap.TryGetEntryRef: count { count } should be <= stack capacity { cap . Size } ") ;
977+
958978 var hash = eq . GetHashCode ( key ) ;
959979
960980#if NET8_0_OR_GREATER
961- if ( cap . Size >= 8 & Vector256 . IsHardwareAccelerated )
981+ if ( count >= 8 & cap . Size >= 8 & Vector256 . IsHardwareAccelerated )
962982 {
963983 var vHash = Vector256 . Create ( hash ) ;
964984 var vHashes = MemoryMarshal . Cast < int , Vector256 < int > > ( hashes . AsSpan ( ) ) ;
@@ -1084,7 +1104,7 @@ public struct SmallMap<K, TEntry, TEq, TStackCap, TStackHashes, TStackEntries, T
10841104 internal TEntries _entries ;
10851105#pragma warning restore IDE0044
10861106#pragma warning disable CS0649 // Field 'SmallMap<K, V, TEq, TStack, TEntries>.Stack' is never assigned to, and will always have its default value
1087- internal TStackHashes StackHashes ;
1107+ internal TStackHashes _stackHashes ;
10881108 internal TStackEntries StackEntries ;
10891109#pragma warning restore CS0649
10901110
@@ -1143,7 +1163,7 @@ private ref TEntry AddOrGetRefInEntries(K key, out bool found)
11431163#else
11441164 var hashesAndIndexes = _packedHashesAndIndexes ;
11451165#endif
1146- ref var h = ref NextHashRef ( ref hashesAndIndexes , hashIndex ) ;
1166+ ref var h = ref hashesAndIndexes . GetItemRef ( hashIndex ) ;
11471167
11481168 // 1. Skip over hashes with the bigger and equal probes. The hashes with bigger probes overlapping from the earlier ideal positions
11491169 var probes = 1 ;
@@ -1156,7 +1176,7 @@ private ref TEntry AddOrGetRefInEntries(K key, out bool found)
11561176 if ( found = default ( TEq ) . Equals ( e . Key , key ) )
11571177 return ref e ;
11581178 }
1159- h = ref NextHashRef ( ref hashesAndIndexes , ++ hashIndex & indexMask ) ;
1179+ h = ref hashesAndIndexes . GetItemRef ( ++ hashIndex & indexMask ) ;
11601180 ++ probes ;
11611181 }
11621182 found = false ;
@@ -1170,7 +1190,7 @@ private ref TEntry AddOrGetRefInEntries(K key, out bool found)
11701190 probes = hRobinHooded >>> ProbeCountShift ;
11711191 while ( hRobinHooded != 0 )
11721192 {
1173- h = ref NextHashRef ( ref hashesAndIndexes , ++ hashIndex & indexMask ) ;
1193+ h = ref hashesAndIndexes . GetItemRef ( ++ hashIndex & indexMask ) ;
11741194 if ( ( h >>> ProbeCountShift ) < ++ probes )
11751195 {
11761196 var tmp = h ;
@@ -1193,11 +1213,11 @@ private void AddJustHashAndEntryIndexWithoutResizing(int hash, int index)
11931213 var hashesAndIndexes = _packedHashesAndIndexes ;
11941214#endif
11951215 // 1. Skip over hashes with the bigger and equal probes. The hashes with bigger probes overlapping from the earlier ideal positions
1196- ref var h = ref NextHashRef ( ref hashesAndIndexes , hashIndex ) ;
1216+ ref var h = ref hashesAndIndexes . GetItemRef ( hashIndex ) ;
11971217 var probes = 1 ;
11981218 while ( ( h >>> ProbeCountShift ) >= probes )
11991219 {
1200- h = ref NextHashRef ( ref hashesAndIndexes , ++ hashIndex & IndexMask ) ;
1220+ h = ref hashesAndIndexes . GetItemRef ( ++ hashIndex & IndexMask ) ;
12011221 ++ probes ;
12021222 }
12031223
@@ -1210,7 +1230,7 @@ private void AddJustHashAndEntryIndexWithoutResizing(int hash, int index)
12101230 probes = hRobinHooded >>> ProbeCountShift ;
12111231 while ( hRobinHooded != 0 )
12121232 {
1213- h = ref NextHashRef ( ref hashesAndIndexes , ++ hashIndex & IndexMask ) ;
1233+ h = ref hashesAndIndexes . GetItemRef ( ++ hashIndex & IndexMask ) ;
12141234 if ( ( h >>> ProbeCountShift ) < ++ probes )
12151235 {
12161236 var tmp = h ;
@@ -1229,20 +1249,16 @@ public ref TEntry AddOrGetEntryRef(K key, out bool found)
12291249 if ( _count > StackEntries . Capacity )
12301250 return ref AddOrGetRefInEntries ( key , out found ) ;
12311251
1232- // Linear search in stack (which has a few items) by comparing the keys without calculating the hashes
1233- // Saving on the hash calculation. Losing on the bigger number of comparisons.
1234- for ( var i = 0 ; i < _count ; ++ i )
1235- {
1236- ref var e = ref GetSurePresentEntryRef ( i ) ;
1237- if ( found = default ( TEq ) . Equals ( e . Key , key ) )
1238- return ref e ;
1239- }
1240- found = false ;
1252+ ref var e = ref StackEntries . TryGetEntryRef ( ref _stackHashes , _count , key , out found ,
1253+ default ( TEq ) , default ( TStackCap ) , default ( Use < TEntry > ) ) ;
1254+ if ( found )
1255+ return ref e ;
12411256
12421257 // Add the new entry to the stack if there is still space in stack
12431258 if ( _count < StackEntries . Capacity )
12441259 {
12451260 var newIndex = _count ++ ;
1261+ _stackHashes . GetSurePresentItemRef ( newIndex ) = default ( TEq ) . GetHashCode ( key ) ;
12461262 ref var newEntry = ref StackEntries . GetSurePresentItemRef ( newIndex ) ;
12471263 newEntry . Key = key ;
12481264 return ref newEntry ;
@@ -1286,13 +1302,13 @@ private ref TEntry AddSureAbsentDefaultAndGetRefInEntries(K key)
12861302#else
12871303 var hashesAndIndexes = _packedHashesAndIndexes ;
12881304#endif
1289- ref var h = ref NextHashRef ( ref hashesAndIndexes , hashIndex ) ;
1305+ ref var h = ref hashesAndIndexes . GetItemRef ( hashIndex ) ;
12901306
12911307 // 1. Skip over hashes with the bigger and equal probes. The hashes with bigger probes overlapping from the earlier ideal positions
12921308 var probes = 1 ;
12931309 while ( ( h >>> ProbeCountShift ) >= probes )
12941310 {
1295- h = ref NextHashRef ( ref hashesAndIndexes , ++ hashIndex & indexMask ) ;
1311+ h = ref hashesAndIndexes . GetItemRef ( ++ hashIndex & indexMask ) ;
12961312 ++ probes ;
12971313 }
12981314
@@ -1305,7 +1321,7 @@ private ref TEntry AddSureAbsentDefaultAndGetRefInEntries(K key)
13051321 probes = hRobinHooded >>> ProbeCountShift ;
13061322 while ( hRobinHooded != 0 )
13071323 {
1308- h = ref NextHashRef ( ref hashesAndIndexes , ++ hashIndex & indexMask ) ;
1324+ h = ref hashesAndIndexes . GetItemRef ( ++ hashIndex & indexMask ) ;
13091325 if ( ( h >>> ProbeCountShift ) < ++ probes )
13101326 {
13111327 var tmp = h ;
@@ -1384,7 +1400,7 @@ internal ref TEntry TryGetRefInEntries(K key, out bool found)
13841400 var hashesAndIndexes = _packedHashesAndIndexes ;
13851401#endif
13861402
1387- var h = NextHash ( ref hashesAndIndexes , hashIndex ) ;
1403+ var h = hashesAndIndexes . GetItem ( hashIndex ) ;
13881404
13891405 // 1. Skip over hashes with the bigger and equal probes. The hashes with bigger probes overlapping from the earlier ideal positions
13901406 var probes = 1 ;
@@ -1398,7 +1414,7 @@ internal ref TEntry TryGetRefInEntries(K key, out bool found)
13981414 return ref e ;
13991415 }
14001416
1401- h = NextHash ( ref hashesAndIndexes , ++ hashIndex & indexMask ) ;
1417+ h = hashesAndIndexes . GetItem ( ++ hashIndex & indexMask ) ;
14021418 ++ probes ;
14031419 }
14041420
@@ -1445,7 +1461,7 @@ internal int ResizeHashes(int indexMask)
14451461 // Overflow segment is wrapped-around hashes and! the hashes at the beginning robin hooded by the wrapped-around hashes
14461462 var i = 0 ;
14471463 while ( ( oldHash >>> ProbeCountShift ) > 1 )
1448- oldHash = NextHash ( ref oldHashes , ++ i ) ;
1464+ oldHash = oldHashes . GetItem ( ++ i ) ;
14491465
14501466 var oldCapacityWithOverflowSegment = i + oldCapacity ;
14511467 while ( true )
@@ -1457,18 +1473,18 @@ internal int ResizeHashes(int indexMask)
14571473
14581474 // no need for robin-hooding because we already did it for the old hashes and now just filling the hashes into the new array which are already in order
14591475 var probes = 1 ;
1460- ref var newHash = ref NextHashRef ( ref newHashes , indexWithNextBit ) ;
1476+ ref var newHash = ref newHashes . GetItemRef ( indexWithNextBit ) ;
14611477 while ( newHash != 0 )
14621478 {
1463- newHash = ref NextHashRef ( ref newHashes , ++ indexWithNextBit & newIndexMask ) ;
1479+ newHash = ref newHashes . GetItemRef ( ++ indexWithNextBit & newIndexMask ) ;
14641480 ++ probes ;
14651481 }
14661482 newHash = ( probes << ProbeCountShift ) | ( oldHash & newHashAndIndexMask ) ;
14671483 }
14681484 if ( ++ i >= oldCapacityWithOverflowSegment )
14691485 break ;
14701486
1471- oldHash = NextHash ( ref oldHashes , i & indexMask ) ;
1487+ oldHash = oldHashes . GetItem ( i & indexMask ) ;
14721488 }
14731489 ++ _capacityBitShift ;
14741490 _packedHashesAndIndexes = newHashesAndIndexes ;
0 commit comments