@@ -74,7 +74,7 @@ internal async Task<object[]> GetCollectionBatchAsync(ICollectionPersister colle
 			foreach (KeyValuePair<CollectionEntry, IPersistentCollection> me in map)
 			{
 				cancellationToken.ThrowIfCancellationRequested();
-				if (await (ProcessKeyAsync(me)).ConfigureAwait(false))
+				if (await (ProcessKeyAndCheckCacheAsync(me)).ConfigureAwait(false))
 				{
 					return keys;
 				}
@@ -107,7 +107,7 @@ async Task<bool> CheckCacheAndProcessResultAsync()
 				{
 					for (var j = 0; j < collectionKeys.Count; j++)
 					{
-						if (await (ProcessKeyAsync(collectionKeys[indexes[j]].Key)).ConfigureAwait(false))
+						if (ProcessKey(collectionKeys[indexes[j]].Key) == true)
 						{
 							return true;
 						}
@@ -118,7 +118,7 @@ async Task<bool> CheckCacheAndProcessResultAsync()
 					var results = await (AreCachedAsync(collectionKeys, indexes, collectionPersister, batchableCache, checkCache, cancellationToken)).ConfigureAwait(false);
 					for (var j = 0; j < results.Length; j++)
 					{
-						if (!results[j] && await (ProcessKeyAsync(collectionKeys[indexes[j]].Key, true)).ConfigureAwait(false))
+						if (!results[j] && ProcessKey(collectionKeys[indexes[j]].Key, true) == true)
 						{
 							return true;
 						}
@@ -132,91 +132,89 @@ async Task<bool> CheckCacheAndProcessResultAsync()
 				return false;
 			}
 
-			Task<bool> ProcessKeyAsync(KeyValuePair<CollectionEntry, IPersistentCollection> me, bool ignoreCache = false)
+			async Task<bool> ProcessKeyAndCheckCacheAsync(KeyValuePair<CollectionEntry, IPersistentCollection> me)
 			{
-				try
+				return ProcessKey(me) ?? await (CheckCacheAndProcessResultAsync()).ConfigureAwait(false);
+			}
+
+			bool? ProcessKey(KeyValuePair<CollectionEntry, IPersistentCollection> me, bool ignoreCache = false)
+			{
+				var ce = me.Key;
+				var collection = me.Value;
+				if (ce.LoadedKey == null)
 				{
-					var ce = me.Key;
-					var collection = me.Value;
-					if (ce.LoadedKey == null)
-					{
-						// the LoadedKey of the CollectionEntry might be null as it might have been reset to null
-						// (see for example Collections.ProcessDereferencedCollection()
-						// and CollectionEntry.AfterAction())
-						// though we clear the queue on flush, it seems like a good idea to guard
-						// against potentially null LoadedKey:s
-						return Task.FromResult<bool>(false);
-					}
+					// the LoadedKey of the CollectionEntry might be null as it might have been reset to null
+					// (see for example Collections.ProcessDereferencedCollection()
+					// and CollectionEntry.AfterAction())
+					// though we clear the queue on flush, it seems like a good idea to guard
+					// against potentially null LoadedKey:s
+					return false;
+				}
 
-					if (collection.WasInitialized)
-					{
-						log.Warn("Encountered initialized collection in BatchFetchQueue, this should not happen.");
-						return Task.FromResult<bool>(false);
-					}
+				if (collection.WasInitialized)
+				{
+					log.Warn("Encountered initialized collection in BatchFetchQueue, this should not happen.");
+					return false;
+				}
 
-					if (checkForEnd && (index == map.Count || index >= keyIndex.Value + batchSize))
+				if (checkForEnd && (index == map.Count || index >= keyIndex.Value + batchSize))
+				{
+					return true;
+				}
+				if (collectionPersister.KeyType.IsEqual(key, ce.LoadedKey, collectionPersister.Factory))
+				{
+					if (collectionEntries != null)
 					{
-						return Task.FromResult<bool>(true);
+						collectionEntries[0] = ce;
 					}
-					if (collectionPersister.KeyType.IsEqual(key, ce.LoadedKey, collectionPersister.Factory))
+					keyIndex = index;
+				}
+				else if (!checkCache || batchableCache == null)
+				{
+					if (index < map.Count && (!keyIndex.HasValue || index < keyIndex.Value))
 					{
-						if (collectionEntries != null)
-						{
-							collectionEntries[0] = ce;
-						}
-						keyIndex = index;
+						collectionKeys.Add(new KeyValuePair<KeyValuePair<CollectionEntry, IPersistentCollection>, int>(me, index));
+						return false;
 					}
-					else if (!checkCache || batchableCache == null)
-					{
-						if (index < map.Count && (!keyIndex.HasValue || index < keyIndex.Value))
-						{
-							collectionKeys.Add(new KeyValuePair<KeyValuePair<CollectionEntry, IPersistentCollection>, int>(me, index));
-							return Task.FromResult<bool>(false);
-						}
 
-						// No need to check "!checkCache || !IsCached(ce.LoadedKey, collectionPersister)":
-						// "batchableCache == null" already means there is no cache, so IsCached can only yield false.
-						// (This method is now removed.)
-						if (collectionEntries != null)
+					// No need to check "!checkCache || !IsCached(ce.LoadedKey, collectionPersister)":
+					// "batchableCache == null" already means there is no cache, so IsCached can only yield false.
+					// (This method is now removed.)
+					if (collectionEntries != null)
 					{
-						if (collectionEntries != null)
-						{
-							collectionEntries[i] = ce;
-						}
-						keys[i++] = ce.LoadedKey;
-					}
-					else if (ignoreCache)
+						collectionEntries[i] = ce;
 					}
-					else
+					keys[i++] = ce.LoadedKey;
+				}
+				else if (ignoreCache)
+				{
+					if (collectionEntries != null)
 					{
-						collectionKeys.Add(new KeyValuePair<KeyValuePair<CollectionEntry, IPersistentCollection>, int>(me, index));
-						// Check the cache only when we have collected as many keys as are needed to fill the batch,
-						// that are after the demanded key.
-						if (!keyIndex.HasValue || index < keyIndex.Value + batchSize)
-						{
-							return Task.FromResult<bool>(false);
-						}
-						return CheckCacheAndProcessResultAsync();
+						collectionEntries[i] = ce;
 					}
-					if (i == batchSize)
+					keys[i++] = ce.LoadedKey;
+				}
+				else
+				{
+					collectionKeys.Add(new KeyValuePair<KeyValuePair<CollectionEntry, IPersistentCollection>, int>(me, index));
+					// Check the cache only when we have collected as many keys as are needed to fill the batch,
+					// that are after the demanded key.
+					if (!keyIndex.HasValue || index < keyIndex.Value + batchSize)
 					{
-						i = 1; // End of array, start filling again from start
-						if (index == map.Count || keyIndex.HasValue)
-						{
-							checkForEnd = true;
-							return Task.FromResult<bool>(index == map.Count || index >= keyIndex.Value + batchSize);
-						}
+						return false;
 					}
-					return Task.FromResult<bool>(false);
+					return null;
 				}
-				catch (Exception ex)
+				if (i == batchSize)
 				{
-					return Task.FromException<bool>(ex);
+					i = 1; // End of array, start filling again from start
+					if (index == map.Count || keyIndex.HasValue)
+					{
+						checkForEnd = true;
+						return index == map.Count || index >= keyIndex.Value + batchSize;
+					}
 				}
+				return false;
 			}
 		}
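The change above swaps the try/Task.FromResult/Task.FromException plumbing for a synchronous ProcessKey that returns bool?: false means "keep scanning the map", true means "the batch is complete", and null means "the answer depends on the asynchronous cache check". ProcessKeyAndCheckCacheAsync resolves the null case with the null-coalescing operator before awaiting, so the await is only paid when the cache actually has to be consulted. Below is a minimal, self-contained sketch of that tri-state pattern; the identifiers (TriStateBatchingSketch, TryProcess, CheckPendingAsync) are hypothetical and are not NHibernate APIs.

using System;
using System.Collections.Generic;
using System.Threading.Tasks;

// Minimal sketch of the bool? fast-path pattern shown in the diff above.
// All names here are illustrative only; they do not exist in NHibernate.
static class TriStateBatchingSketch
{
	// Synchronous fast path: true/false when the answer is known locally,
	// null when an asynchronous check (e.g. a cache lookup) is still required.
	static bool? TryProcess(int item, ICollection<int> pending)
	{
		if (item < 0)
			return false; // nothing to do for this item, keep scanning
		if (item == 0)
			return true; // batch is complete
		pending.Add(item); // defer the decision to the async check
		return null;
	}

	// Async wrapper mirroring ProcessKeyAndCheckCacheAsync: the awaited path
	// runs only when the synchronous result is null.
	static async Task<bool> TryProcessAndCheckAsync(int item, ICollection<int> pending)
	{
		return TryProcess(item, pending) ?? await CheckPendingAsync(pending).ConfigureAwait(false);
	}

	// Stand-in for the asynchronous cache check (CheckCacheAndProcessResultAsync in the diff).
	static async Task<bool> CheckPendingAsync(ICollection<int> pending)
	{
		await Task.Delay(1).ConfigureAwait(false); // simulate I/O
		return pending.Count >= 3;                 // e.g. "enough keys collected to fill the batch"
	}

	static async Task Main()
	{
		var pending = new List<int>();
		foreach (var item in new[] { 5, 7, -1, 9 })
		{
			if (await TryProcessAndCheckAsync(item, pending).ConfigureAwait(false))
			{
				Console.WriteLine("Batch complete.");
				return;
			}
		}
		Console.WriteLine($"Scan finished with {pending.Count} pending item(s).");
	}
}

Compared with wrapping every early return in Task.FromResult inside a try/catch that converts exceptions via Task.FromException, the synchronous path allocates no Task at all and lets exceptions flow through the single genuinely async wrapper.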