@@ -23,7 +23,7 @@ namespace LazyCacheHelpers
     /// NOTE: A wrapper implementation for MemoryCache is provided (via the default ICacheRepository implementation, LazyDotNetMemoryCacheRepository) to
     /// greatly simplify working with MemoryCache, with support for self-populating (Lazy) initialization.
     /// </summary>
-    public class LazyCacheHandler<TValue> : ILazyCacheHandler<TValue> where TValue : class
+    public class LazyCacheHandler<TValue> : ILazyCacheHandler<TValue>, ILazyCacheHandlerSelfExpiringResults<TValue> where TValue : class
     {
         //Added methods to CacheHelper to work with MemoryCache more easily.
         //NOTE: .Net MemoryCache does NOT support Garbage Collection and Resource Reclaiming so it should
@@ -46,8 +46,14 @@ public LazyCacheHandler()
         { }

         /// <summary>
-        /// BBernard
-        /// A wrapper implementation for ICacheRepository to make working with Thread Safety significantly easier.
+        /// This overload enables dynamic self-populating cache retrieval of any result generated by the specified
+        /// cache item/result factory Func&lt;T&gt;. Using the Cache Item Expiration Policy provided, the cache will ensure that
+        /// the work is only ever performed by one and only one request, while all other simultaneous requests will immediately
+        /// benefit from the work and receive the cached item once it's generated.
+        ///
+        /// In this overload, the CacheItemPolicy is known before executing the logic, so it is consistent for all calls to this method.
+        ///
+        /// Ultimately this is a wrapper implementation for ICacheRepository that makes working with Thread Safety significantly easier.
         /// This provides a completely ThreadSafe cache with Lazy Loading capabilities in an easy-to-use function;
         /// Lazy<typeparamref name="TValue"/> loading facilitates a self-populating cache so that long-running processes are never
         /// executed more than once, even if they are triggered at approximately the same time.
@@ -59,11 +65,11 @@ public LazyCacheHandler()
         /// https://blog.falafel.com/working-system-runtime-caching-memorycache/
         /// </summary>
         /// <typeparam name="TKey"></typeparam>
-        /// <typeparam name="T"></typeparam>
         /// <param name="key"></param>
         /// <param name="fnValueFactory"></param>
         /// <param name="cacheItemPolicy"></param>
         /// <returns></returns>
+        /// <exception cref="ArgumentNullException"></exception>
         public virtual TValue GetOrAddFromCache<TKey>(TKey key, Func<TValue> fnValueFactory, CacheItemPolicy cacheItemPolicy)
         {
             //We support either the ILazyCacheKey interface or any object for the Cache Key, as long as its ToString()
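For orientation, here is a minimal usage sketch of the policy-based overload documented in the hunk above. It is illustrative only and not part of this commit; BuildReportExpensively() is a hypothetical long-running factory method, and the policy is built directly with System.Runtime.Caching rather than any helper.

    using System;
    using System.Runtime.Caching;
    using LazyCacheHelpers;

    // Illustrative only: the cache value type here is a simple string report.
    var reportCache = new LazyCacheHandler<string>();

    // The policy is known up front, so every call for this cache key uses the same 5 minute absolute expiration.
    var fiveMinutePolicy = new CacheItemPolicy { AbsoluteExpiration = DateTimeOffset.Now.AddMinutes(5) };

    // Only the first caller executes the factory; concurrent callers receive the same cached result
    // once the Lazy initialization completes.
    string report = reportCache.GetOrAddFromCache("daily-report", () => BuildReportExpensively(), fiveMinutePolicy);

    // Hypothetical stand-in for a slow operation (e.g., a heavy DB query or report generation).
    static string BuildReportExpensively()
    {
        System.Threading.Thread.Sleep(2000);
        return "report-body";
    }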
@@ -99,9 +105,54 @@ public virtual TValue GetOrAddFromCache<TKey>(TKey key, Func<TValue> fnValueFact
         }

         /// <summary>
-        /// BBernard
+        /// This overload enables dynamic self-populating cache retrieval whereby the cache item factory itself also returns
+        /// the cache expiration policy in addition to the cache item result. This is very useful in cases such as Auth tokens
+        /// and external API calls where the response contains information about how long the returned data is valid, and therefore
+        /// the response can be used to construct a highly optimized Cache Expiration Policy based on the data returned -- rather than
+        /// simply guessing and/or hard-coding how long the data is valid.
+        /// </summary>
+        /// <typeparam name="TKey"></typeparam>
+        /// <param name="key"></param>
+        /// <param name="fnValueFactory"></param>
+        /// <returns></returns>
+        /// <exception cref="ArgumentNullException"></exception>
+        public TValue GetOrAddFromCache<TKey>(TKey key, Func<ILazySelfExpiringCacheResult<TValue>> fnValueFactory)
+        {
+            if (fnValueFactory == null)
+                throw new ArgumentNullException(nameof(fnValueFactory));
+
+            //TODO: WIP: Verify this captured-variable approach actually works -- the policy argument below is evaluated (as null)
+            // when GetOrAddFromCache is called, and updating the captured variable inside the factory does not change the value
+            // already passed in, so the factory's policy may never be applied...
+            CacheItemPolicy cacheItemPolicyFromResultRef = null;
+            return GetOrAddFromCache(key, () =>
+            {
+                //Execute the original Cache Factory method...
+                var selfExpiringCacheResult = fnValueFactory.Invoke();
+                //Now unwrap the result and set the CacheItemPolicy from the result into our captured variable...
+                cacheItemPolicyFromResultRef = selfExpiringCacheResult.CachePolicy;
+
+                //Validate that the returned policy is valid; otherwise we use a Disabled Cache Policy fallback to ensure
+                // that the code still runs without issue...
+                if (!LazyCachePolicy.IsPolicyEnabled(cacheItemPolicyFromResultRef))
+                    cacheItemPolicyFromResultRef = LazyCachePolicy.DisabledCachingPolicy;
+
+                //Finally return the actual result just as a normal cache item factory would...
+                return selfExpiringCacheResult.CacheItem;
+            },
+            //Here we pass in our captured policy variable (see the TODO above regarding whether the factory's update is ever visible here);
+            // if the value is already in the cache the factory never runs and this policy is not needed...
+            cacheItemPolicyFromResultRef
+            );
+        }
+
+        /// <summary>
+        /// This overload enables async dynamic self-populating cache retrieval of any result generated by the specified
+        /// cache item/result factory Func&lt;T&gt;. Using the Cache Item Expiration Policy provided, the cache will ensure that
+        /// the work is only ever performed by one and only one request, while all other simultaneous requests will immediately
+        /// benefit from the work and receive the cached item once it's generated.
         ///
-        /// An Async wrapper implementation for using ICacheRepository to make working with Thread Safety for
+        /// In this overload, the CacheItemPolicy is known before executing the logic, so it is consistent for all calls to this method.
+        ///
+        /// Ultimately this is an Async wrapper implementation for ICacheRepository that makes working with Thread Safety for
         /// Asynchronous processes significantly easier. This provides a completely ThreadSafe Async cache with Lazy Loading capabilities
         /// in an easy-to-use function; Lazy<typeparamref name="TValue"/> loading facilitates a self-populating cache so that long-running processes are never
         /// executed more than once, even if they are triggered at approximately the same time.
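A usage sketch for the new self-expiring GetOrAddFromCache overload added earlier in this hunk (illustrative, not part of this commit). It assumes ILazySelfExpiringCacheResult<TValue> exposes just the CacheItem and CachePolicy members read in the code above; AuthToken, IAuthClient, and the SelfExpiringResult wrapper are hypothetical stand-ins.

    using System;
    using System.Runtime.Caching;
    using System.Threading.Tasks;
    using LazyCacheHelpers;

    // Hypothetical stand-ins for a real token type and auth/API client.
    public class AuthToken { public string Value { get; set; } public int ExpiresInSeconds { get; set; } }
    public interface IAuthClient { AuthToken RequestToken(); Task<AuthToken> RequestTokenAsync(); }   // async member used by a later sketch

    // Hypothetical wrapper -- the library may already ship (or later add) a concrete type for this interface.
    public class SelfExpiringResult<T> : ILazySelfExpiringCacheResult<T> where T : class
    {
        public T CacheItem { get; set; }
        public CacheItemPolicy CachePolicy { get; set; }
    }

    public class AuthTokenCacheExample
    {
        private readonly LazyCacheHandler<AuthToken> _tokenCache = new LazyCacheHandler<AuthToken>();
        private readonly IAuthClient _authClient;

        public AuthTokenCacheExample(IAuthClient authClient) => _authClient = authClient;

        public AuthToken GetToken() => _tokenCache.GetOrAddFromCache("api-auth-token", () =>
        {
            // The (hypothetical) response tells us exactly how long the token remains valid...
            AuthToken freshToken = _authClient.RequestToken();

            // ...so the expiration policy is derived from the response instead of a hard-coded guess.
            var policy = new CacheItemPolicy
            {
                AbsoluteExpiration = DateTimeOffset.Now.AddSeconds(freshToken.ExpiresInSeconds)
            };

            return new SelfExpiringResult<AuthToken> { CacheItem = freshToken, CachePolicy = policy };
        });
    }

Whether the derived policy actually reaches the underlying cache entry depends on the captured-variable concern flagged in the TODO inside the new overload.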
@@ -123,14 +174,18 @@ public virtual TValue GetOrAddFromCache<TKey>(TKey key, Func<TValue> fnValueFact
         /// the Async/Await Task Based Asynchronous pattern from top to bottom and even with our caching!
         ///
         /// </summary>
-        /// <typeparam name="T"></typeparam>
+        /// <typeparam name="TKey"></typeparam>
         /// <param name="key"></param>
         /// <param name="fnAsyncValueFactory"></param>
         /// <param name="cacheItemPolicy"></param>
         /// <returns></returns>
+        /// <exception cref="ArgumentNullException"></exception>
         public virtual async Task<TValue> GetOrAddFromCacheAsync<TKey>(TKey key, Func<Task<TValue>> fnAsyncValueFactory, CacheItemPolicy cacheItemPolicy)
         {
-            //We support eitehr ILazyCacheKey interface or any object for the Cache Key as long as it's ToString()
+            if (fnAsyncValueFactory == null)
+                throw new ArgumentNullException(nameof(fnAsyncValueFactory));
+
+            //We support either the ILazyCacheKey interface or any object for the Cache Key, as long as its ToString()
             // implementation creates a valid unique Key for us, so here we initialize the Cache Key to use.
             string cacheKey = GenerateCacheKeyHelper(key);

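Likewise, a brief sketch of the async policy-based overload shown in this hunk (illustrative only; CustomerCacheKey, CustomerProfile, customerId, and FetchCustomerProfileAsync are hypothetical, and the call sits inside an async method):

    var profileCache = new LazyCacheHandler<CustomerProfile>();
    var tenMinutePolicy = new CacheItemPolicy { AbsoluteExpiration = DateTimeOffset.Now.AddMinutes(10) };

    // Any key type works as long as its ToString() yields a unique cache key (or it implements ILazyCacheKey).
    // Only the first caller runs the async factory; concurrent callers share the same cached result.
    CustomerProfile profile = await profileCache.GetOrAddFromCacheAsync(
        new CustomerCacheKey(customerId),
        async () => await FetchCustomerProfileAsync(customerId),
        tenMinutePolicy);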
@@ -179,6 +234,46 @@ public virtual async Task<TValue> GetOrAddFromCacheAsync<TKey>(TKey key, Func<Ta
             }
         }

+        /// <summary>
+        /// This overload enables async dynamic self-populating cache retrieval whereby the cache item factory itself also returns
+        /// the cache expiration policy in addition to the cache item result. This is very useful in cases such as Auth tokens
+        /// and external API calls where the response contains information about how long the returned data is valid, and therefore
+        /// the response can be used to construct a highly optimized Cache Expiration Policy based on the data returned -- rather than
+        /// simply guessing and/or hard-coding how long the data is valid.
+        /// </summary>
+        /// <typeparam name="TKey"></typeparam>
+        /// <param name="key"></param>
+        /// <param name="fnAsyncValueFactory"></param>
+        /// <returns></returns>
+        /// <exception cref="ArgumentNullException"></exception>
+        public Task<TValue> GetOrAddFromCacheAsync<TKey>(TKey key, Func<Task<ILazySelfExpiringCacheResult<TValue>>> fnAsyncValueFactory)
+        {
+            if (fnAsyncValueFactory == null)
+                throw new ArgumentNullException(nameof(fnAsyncValueFactory));
+
+            //TODO: WIP: Verify this captured-variable approach actually works -- the policy argument below is evaluated (as null)
+            // when GetOrAddFromCacheAsync is called, and updating the captured variable inside the factory does not change the value
+            // already passed in, so the factory's policy may never be applied...
+            CacheItemPolicy cacheItemPolicyFromResultRef = null;
+            return GetOrAddFromCacheAsync(key, async () =>
+            {
+                //Execute the original Cache Factory method...
+                var selfExpiringCacheResult = await fnAsyncValueFactory.Invoke();
+                //Now unwrap the result and set the CacheItemPolicy from the result into our captured variable...
+                cacheItemPolicyFromResultRef = selfExpiringCacheResult.CachePolicy;
+
+                //Validate that the returned policy is valid; otherwise we use a Disabled Cache Policy fallback to ensure
+                // that the code still runs without issue...
+                if (!LazyCachePolicy.IsPolicyEnabled(cacheItemPolicyFromResultRef))
+                    cacheItemPolicyFromResultRef = LazyCachePolicy.DisabledCachingPolicy;
+
+                //Finally return the actual result just as a normal cache item factory would...
+                return selfExpiringCacheResult.CacheItem;
+            },
+            //Here we pass in our captured policy variable (see the TODO above regarding whether the factory's update is ever visible here);
+            // if the value is already in the cache the factory never runs and this policy is not needed...
+            cacheItemPolicyFromResultRef
+            );
+        }
+
         /// <summary>
         /// BBernard
         ///
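The async self-expiring overload follows the same shape. Here is a sketch reusing the hypothetical SelfExpiringResult<T> wrapper and auth types from the earlier example, written as an additional member of the AuthTokenCacheExample class sketched above (again illustrative, not part of this commit; it requires using System.Threading.Tasks):

    public async Task<AuthToken> GetTokenAsync() => await _tokenCache.GetOrAddFromCacheAsync("api-auth-token", async () =>
    {
        // Hypothetical async call whose response includes the token lifetime.
        AuthToken freshToken = await _authClient.RequestTokenAsync();

        // Build the expiration policy from the response itself, exactly as in the synchronous example.
        var policy = new CacheItemPolicy
        {
            AbsoluteExpiration = DateTimeOffset.Now.AddSeconds(freshToken.ExpiresInSeconds)
        };

        return new SelfExpiringResult<AuthToken> { CacheItem = freshToken, CachePolicy = policy };
    });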
@@ -232,6 +327,5 @@ protected virtual string GenerateCacheKeyHelper<TKey>(TKey cacheKeyGenerator)
         }

         #endregion
-
     }
 }