Commit aa4e0ae

Avoid recursive calls in BatchFetchQueue
1 parent 109f085 commit aa4e0ae
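
In substance, the commit breaks the mutual recursion between ProcessKey(Async) and CheckCacheAndProcessResult(Async): ProcessKey used to call CheckCacheAndProcessResult once enough candidate keys had been collected, and CheckCacheAndProcessResult in turn calls ProcessKey for every key whose cache lookup came back negative. After the change, ProcessKey returns bool? (true = batch complete, false = keep scanning, null = cache check needed) and a new wrapper, ProcessKeyAndCheckCache(Async), resolves the null case by running the cache check once at the top level. The toy class below is only a sketch of that pattern under simplified assumptions; its fields, batch logic, and cache are invented for illustration and are not the NHibernate members.

using System.Collections.Generic;

// Toy model of the pattern (not NHibernate code): ProcessKey reports three outcomes
// via bool? instead of calling back into the cache check itself, and a thin wrapper
// resolves the null ("cache check needed") outcome once, at the top level.
class BatchFetchSketch
{
	const int BatchSize = 3;
	readonly List<int> _deferred = new List<int>();           // keys whose cache state is still unknown
	readonly HashSet<int> _cache = new HashSet<int> { 2, 4 }; // stand-in for the second-level cache
	int _filled;                                              // batch slots already filled

	// Called from the outer scan loop; true means "batch is full, stop scanning".
	public bool ProcessKeyAndCheckCache(int key)
	{
		return ProcessKey(key) ?? CheckCacheAndProcessResult();
	}

	// true  -> batch full, stop scanning
	// false -> keep scanning
	// null  -> enough deferred keys collected, the caller should run the cache check
	bool? ProcessKey(int key, bool ignoreCache = false)
	{
		if (!ignoreCache)
		{
			_deferred.Add(key);                               // defer: decide in one batched lookup
			return _deferred.Count < BatchSize ? (bool?)false : null;
		}
		_filled++;                                            // known not cached: put it in the batch
		return _filled >= BatchSize;
	}

	bool CheckCacheAndProcessResult()
	{
		// One batched lookup for all deferred keys (AreCached in the real code), then
		// re-process only the ones that are not already cached; no call loops back here.
		foreach (var key in _deferred)
		{
			if (!_cache.Contains(key) && ProcessKey(key, ignoreCache: true) == true)
				return true;
		}
		_deferred.Clear();
		return false;
	}
}

Because the wrapper is the only place that triggers the cache check from the scanning loop, and the cache-check path calls ProcessKey only in its ignoreCache form, the two methods no longer re-enter each other.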

File tree: 2 files changed, 78 additions and 75 deletions

- src/NHibernate/Async/Engine/BatchFetchQueue.cs
- src/NHibernate/Engine/BatchFetchQueue.cs

src/NHibernate/Async/Engine/BatchFetchQueue.cs

Lines changed: 68 additions & 70 deletions
@@ -74,7 +74,7 @@ internal async Task<object[]> GetCollectionBatchAsync(ICollectionPersister colle
 				foreach (KeyValuePair<CollectionEntry, IPersistentCollection> me in map)
 				{
 					cancellationToken.ThrowIfCancellationRequested();
-					if (await (ProcessKeyAsync(me)).ConfigureAwait(false))
+					if (await (ProcessKeyAndCheckCacheAsync(me)).ConfigureAwait(false))
 					{
 						return keys;
 					}
@@ -107,7 +107,7 @@ async Task<bool> CheckCacheAndProcessResultAsync()
 				{
 					for (var j = 0; j < collectionKeys.Count; j++)
 					{
-						if (await (ProcessKeyAsync(collectionKeys[indexes[j]].Key)).ConfigureAwait(false))
+						if (ProcessKey(collectionKeys[indexes[j]].Key) == true)
 						{
 							return true;
 						}
@@ -118,7 +118,7 @@ async Task<bool> CheckCacheAndProcessResultAsync()
 				var results = await (AreCachedAsync(collectionKeys, indexes, collectionPersister, batchableCache, checkCache, cancellationToken)).ConfigureAwait(false);
 				for (var j = 0; j < results.Length; j++)
 				{
-					if (!results[j] && await (ProcessKeyAsync(collectionKeys[indexes[j]].Key, true)).ConfigureAwait(false))
+					if (!results[j] && ProcessKey(collectionKeys[indexes[j]].Key, true) == true)
 					{
 						return true;
 					}
@@ -132,91 +132,89 @@ async Task<bool> CheckCacheAndProcessResultAsync()
 				return false;
 			}
 
-			Task<bool> ProcessKeyAsync(KeyValuePair<CollectionEntry, IPersistentCollection> me, bool ignoreCache = false)
+			async Task<bool> ProcessKeyAndCheckCacheAsync(KeyValuePair<CollectionEntry, IPersistentCollection> me)
 			{
-				try
+				return ProcessKey(me) ?? await (CheckCacheAndProcessResultAsync()).ConfigureAwait(false);
+			}
+
+			bool? ProcessKey(KeyValuePair<CollectionEntry, IPersistentCollection> me, bool ignoreCache = false)
+			{
+				var ce = me.Key;
+				var collection = me.Value;
+				if (ce.LoadedKey == null)
 				{
-					var ce = me.Key;
-					var collection = me.Value;
-					if (ce.LoadedKey == null)
-					{
-						// the LoadedKey of the CollectionEntry might be null as it might have been reset to null
-						// (see for example Collections.ProcessDereferencedCollection()
-						// and CollectionEntry.AfterAction())
-						// though we clear the queue on flush, it seems like a good idea to guard
-						// against potentially null LoadedKey:s
-						return Task.FromResult<bool>(false);
-					}
+					// the LoadedKey of the CollectionEntry might be null as it might have been reset to null
+					// (see for example Collections.ProcessDereferencedCollection()
+					// and CollectionEntry.AfterAction())
+					// though we clear the queue on flush, it seems like a good idea to guard
+					// against potentially null LoadedKey:s
+					return false;
+				}
 
-					if (collection.WasInitialized)
-					{
-						log.Warn("Encountered initialized collection in BatchFetchQueue, this should not happen.");
-						return Task.FromResult<bool>(false);
-					}
+				if (collection.WasInitialized)
+				{
+					log.Warn("Encountered initialized collection in BatchFetchQueue, this should not happen.");
+					return false;
+				}
 
-					if (checkForEnd && (index == map.Count || index >= keyIndex.Value + batchSize))
+				if (checkForEnd && (index == map.Count || index >= keyIndex.Value + batchSize))
+				{
+					return true;
+				}
+				if (collectionPersister.KeyType.IsEqual(key, ce.LoadedKey, collectionPersister.Factory))
+				{
+					if (collectionEntries != null)
 					{
-						return Task.FromResult<bool>(true);
+						collectionEntries[0] = ce;
 					}
-					if (collectionPersister.KeyType.IsEqual(key, ce.LoadedKey, collectionPersister.Factory))
+					keyIndex = index;
+				}
+				else if (!checkCache || batchableCache == null)
+				{
+					if (index < map.Count && (!keyIndex.HasValue || index < keyIndex.Value))
 					{
-						if (collectionEntries != null)
-						{
-							collectionEntries[0] = ce;
-						}
-						keyIndex = index;
+						collectionKeys.Add(new KeyValuePair<KeyValuePair<CollectionEntry, IPersistentCollection>, int>(me, index));
+						return false;
 					}
-					else if (!checkCache || batchableCache == null)
-					{
-						if (index < map.Count && (!keyIndex.HasValue || index < keyIndex.Value))
-						{
-							collectionKeys.Add(new KeyValuePair<KeyValuePair<CollectionEntry, IPersistentCollection>, int>(me, index));
-							return Task.FromResult<bool>(false);
-						}
 
-						// No need to check "!checkCache || !IsCached(ce.LoadedKey, collectionPersister)":
-						// "batchableCache == null" already means there is no cache, so IsCached can only yield false.
-						// (This method is now removed.)
-						if (collectionEntries != null)
-						{
-							collectionEntries[i] = ce;
-						}
-						keys[i++] = ce.LoadedKey;
-					}
-					else if (ignoreCache)
+					// No need to check "!checkCache || !IsCached(ce.LoadedKey, collectionPersister)":
+					// "batchableCache == null" already means there is no cache, so IsCached can only yield false.
+					// (This method is now removed.)
+					if (collectionEntries != null)
 					{
-						if (collectionEntries != null)
-						{
-							collectionEntries[i] = ce;
-						}
-						keys[i++] = ce.LoadedKey;
+						collectionEntries[i] = ce;
 					}
-					else
+					keys[i++] = ce.LoadedKey;
+				}
+				else if (ignoreCache)
+				{
+					if (collectionEntries != null)
 					{
-						collectionKeys.Add(new KeyValuePair<KeyValuePair<CollectionEntry, IPersistentCollection>, int>(me, index));
-						// Check the cache only when we have collected as many keys as are needed to fill the batch,
-						// that are after the demanded key.
-						if (!keyIndex.HasValue || index < keyIndex.Value + batchSize)
-						{
-							return Task.FromResult<bool>(false);
-						}
-						return CheckCacheAndProcessResultAsync();
+						collectionEntries[i] = ce;
 					}
-					if (i == batchSize)
+					keys[i++] = ce.LoadedKey;
+				}
+				else
+				{
+					collectionKeys.Add(new KeyValuePair<KeyValuePair<CollectionEntry, IPersistentCollection>, int>(me, index));
+					// Check the cache only when we have collected as many keys as are needed to fill the batch,
+					// that are after the demanded key.
+					if (!keyIndex.HasValue || index < keyIndex.Value + batchSize)
 					{
-						i = 1; // End of array, start filling again from start
-						if (index == map.Count || keyIndex.HasValue)
-						{
-							checkForEnd = true;
-							return Task.FromResult<bool>(index == map.Count || index >= keyIndex.Value + batchSize);
-						}
+						return false;
 					}
-					return Task.FromResult<bool>(false);
+					return null;
 				}
-				catch (Exception ex)
+				if (i == batchSize)
 				{
-					return Task.FromException<bool>(ex);
+					i = 1; // End of array, start filling again from start
+					if (index == map.Count || keyIndex.HasValue)
+					{
+						checkForEnd = true;
+						return index == map.Count || index >= keyIndex.Value + batchSize;
+					}
 				}
+				return false;
 			}
 		}
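
The call sites in CheckCacheAndProcessResultAsync now compare the ProcessKey result with == true because ProcessKey returns bool? rather than bool: a null result (meaning "a cache check is needed") must not be treated as "stop scanning". A tiny standalone snippet, unrelated to NHibernate itself, showing how C# evaluates those comparisons:

using System;

static class NullableBoolDemo
{
	static void Main()
	{
		bool? result = null;

		// A bool? cannot be used directly as an if-condition; "== true" treats
		// null like false, which is what the ProcessKey(...) == true checks rely on.
		Console.WriteLine(result == true);   // False
		Console.WriteLine(result ?? false);  // False: the pattern used by the new wrapper

		result = true;
		Console.WriteLine(result == true);   // True
	}
}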

src/NHibernate/Engine/BatchFetchQueue.cs

Lines changed: 10 additions & 5 deletions
@@ -244,7 +244,7 @@ internal object[] GetCollectionBatch(ICollectionPersister collectionPersister, o
 
 				foreach (KeyValuePair<CollectionEntry, IPersistentCollection> me in map)
 				{
-					if (ProcessKey(me))
+					if (ProcessKeyAndCheckCache(me))
 					{
 						return keys;
 					}
@@ -276,7 +276,7 @@ bool CheckCacheAndProcessResult()
 				{
 					for (var j = 0; j < collectionKeys.Count; j++)
 					{
-						if (ProcessKey(collectionKeys[indexes[j]].Key))
+						if (ProcessKey(collectionKeys[indexes[j]].Key) == true)
 						{
 							return true;
 						}
@@ -287,7 +287,7 @@ bool CheckCacheAndProcessResult()
 				var results = AreCached(collectionKeys, indexes, collectionPersister, batchableCache, checkCache);
 				for (var j = 0; j < results.Length; j++)
 				{
-					if (!results[j] && ProcessKey(collectionKeys[indexes[j]].Key, true))
+					if (!results[j] && ProcessKey(collectionKeys[indexes[j]].Key, true) == true)
 					{
 						return true;
 					}
@@ -301,7 +301,12 @@ bool CheckCacheAndProcessResult()
 				return false;
 			}
 
-			bool ProcessKey(KeyValuePair<CollectionEntry, IPersistentCollection> me, bool ignoreCache = false)
+			bool ProcessKeyAndCheckCache(KeyValuePair<CollectionEntry, IPersistentCollection> me)
+			{
+				return ProcessKey(me) ?? CheckCacheAndProcessResult();
+			}
+
+			bool? ProcessKey(KeyValuePair<CollectionEntry, IPersistentCollection> me, bool ignoreCache = false)
 			{
 				var ce = me.Key;
 				var collection = me.Value;
@@ -367,7 +372,7 @@ bool ProcessKey(KeyValuePair<CollectionEntry, IPersistentCollection> me, bool ig
 					{
 						return false;
 					}
-					return CheckCacheAndProcessResult();
+					return null;
 				}
 				if (i == batchSize)
 				{
