Locking pattern for proper use of .NET MemoryCache

前端 未结 9 2076
栀梦
栀梦 2020-11-30 17:13

I assume this code has concurrency issues:

const string CacheKey = "CacheKey";
static string GetCachedData()
{
    string expensiveString =null;
    if (MemoryCache.Default.Contains(CacheKey))
    // … [code snippet truncated in the original page]


        
相关标签:
9条回答
  • 2020-11-30 17:52

    Console example of MemoryCache, "How to save/get simple class objects"

    Output after launching and pressing Any key except Esc :

    Saving to cache!
    Getting from cache!
    Some1
    Some2

        class Some
        {
            // Text payload to store in the cache demo below.
            public String text { get; set; }

            public Some(String text) => this.text = text;

            // Print the payload itself when the object is written out.
            public override string ToString() => text;
        }
    
        // Named MemoryCache instance shared by the demo ("cache" is the cache's name, not a key).
        public static MemoryCache cache = new MemoryCache("cache");

        // Key under which the List<Some> is stored in the cache.
        public static string cache_name = "mycache";
    
        static void Main(string[] args)
        {
            Some some1 = new Some("some1");
            Some some2 = new Some("some2");

            List<Some> list = new List<Some> { some1, some2 };

            do
            {
                // BUG FIX: the original used Contains() followed by Get() — a
                // check-then-act race. The entry can expire/be evicted between
                // the two calls, making Get() return null and the foreach throw.
                // A single Get() with a type test is atomic per lookup.
                if (cache.Get(cache_name) is List<Some> list_c)
                {
                    Console.WriteLine("Getting from cache!");
                    foreach (Some s in list_c) Console.WriteLine(s);
                }
                else
                {
                    Console.WriteLine("Saving to cache!");
                    // Absolute expiration 10 minutes from now.
                    cache.Set(cache_name, list, DateTime.Now.AddMinutes(10));
                }

            } while (Console.ReadKey(true).Key != ConsoleKey.Escape);

        }
    
    0 讨论(0)
  • 2020-11-30 17:57

    I've solved this issue by making use of the AddOrGetExisting method on the MemoryCache and the use of Lazy initialization.

    Essentially, my code looks something like this:

    /// <summary>
    /// Returns the cached string for <paramref name="key"/>, computing it at most
    /// once per cache lifetime even under concurrent callers.
    /// </summary>
    /// <param name="key">Cache key.</param>
    /// <param name="offset">Absolute expiration for the cached value.</param>
    static string GetCachedData(string key, DateTimeOffset offset)
    {
        // Wrap the expensive call in a Lazy so that even if several threads race
        // past AddOrGetExisting, only the Lazy that won the race runs the
        // calculation (Lazy<T> is thread-safe by default).
        Lazy<String> lazyObject = new Lazy<String>(() => SomeHeavyAndExpensiveCalculationThatReturnsAString());

        // AddOrGetExisting returns null when OUR Lazy was inserted, otherwise
        // the Lazy that is already cached — coalesce to whichever one is live.
        var cachedLazy = (Lazy<String>)MemoryCache.Default.AddOrGetExisting(key, lazyObject, offset) ?? lazyObject;

        try
        {
            return cachedLazy.Value;
        }
        catch
        {
            // BUG FIX: Lazy<T> caches a thrown exception, so without eviction
            // every caller until [offset] would re-observe the same failure.
            // Remove the faulted entry so the next call retries the calculation.
            MemoryCache.Default.Remove(key);
            throw;
        }
    }
    

    Worst case scenario here is that you create the same Lazy object twice. But that is pretty trivial. The use of AddOrGetExisting guarantees that you'll only ever get one instance of the Lazy object, and so you're also guaranteed to only call the expensive initialization method once.

    0 讨论(0)
  • 2020-11-30 17:59

    It's a bit late, but here is a full implementation:

        /// <summary>
        /// Returns page content for the query, served from
        /// <see cref="_requestQueryCache"/> when a fresh entry exists.
        /// </summary>
        [HttpGet]
        public async Task<HttpResponseMessage> GetPageFromUriOrBody(RequestQuery requestQuery)
        {
            log(nameof(GetPageFromUriOrBody), nameof(requestQuery));
            // NOTE(review): getPageContent(x).Result blocks synchronously inside
            // the cache delegate — a sync-over-async deadlock risk on hosts with a
            // synchronization context (classic ASP.NET). TODO confirm host; an
            // async-aware cache delegate would avoid this.
            var responseResult = await _requestQueryCache.GetOrCreate(
                nameof(GetPageFromUriOrBody)
                , requestQuery
                , (x) => getPageContent(x).Result);
            return Request.CreateResponse(System.Net.HttpStatusCode.Accepted, responseResult);
        }
        // One cache shared by every request to this action (static field).
        static MemoryCacheWithPolicy<RequestQuery, string> _requestQueryCache = new MemoryCacheWithPolicy<RequestQuery, string>();
    

    Here is getPageContent signature:

    async Task<string> getPageContent(RequestQuery requestQuery);
    

    And here is the MemoryCacheWithPolicy implementation:

    /// <summary>
    /// Memory cache that stores a result keyed by <paramref name="key"/> together
    /// with the parameter model that produced it; the result is recomputed when
    /// the parameter changes or when the entry expires.
    /// </summary>
    public class MemoryCacheWithPolicy<TParameter, TResult>
    {
        static ILogger _nlogger = new AppLogger().Logger;

        private MemoryCache _cache = new MemoryCache(new MemoryCacheOptions() 
        {
            // SizeLimit is the sum of all entry Size values (see SetSize(1)
            // below), NOT a byte count.
            SizeLimit = 1024 
        });

        /// <summary>
        /// Gets the cached result for <paramref name="key"/>, or creates a new one
        /// via <paramref name="createCacheData"/> when the entry is missing or the
        /// associated parameter model has changed.
        /// </summary>
        /// <param name="key">Main data cache memory key.</param>
        /// <param name="param">Parameter model associated with the main model (request result).</param>
        /// <param name="createCacheData">A delegate to create a new main data to cache.</param>
        /// <returns>The cached or freshly created result.</returns>
        public async Task<TResult> GetOrCreate(object key, TParameter param, Func<TParameter, TResult> createCacheData)
        {
            // Companion key under which the parameter model is cached.
            var paramKey = key + nameof(param);

            if (!_cache.TryGetValue(key, out TResult cacheEntry))
            {
                // Key is not in the cache: create data through the delegate.
                cacheEntry = createCacheData(param);
                createMemoryCache(key, cacheEntry, paramKey, param);

                _nlogger.Warn(" cache is created.");
            }
            else
            {
                // BUG FIX: the original ignored TryGetValue's return value here
                // and then called cacheParam.Equals(param), which throws
                // NullReferenceException when the param entry expired or was
                // evicted independently of the main entry. Treat a missing param
                // entry the same as a changed one: recreate.
                if (!_cache.TryGetValue(paramKey, out TParameter cacheParam)
                    || !param.Equals(cacheParam))
                {
                    // Request param is missing or changed: recreate through the delegate.
                    cacheEntry = createCacheData(param);
                    createMemoryCache(key, cacheEntry, paramKey, param);
                    _nlogger.Warn(" cache is re-created (param model has been changed).");
                }
                else
                {
                    _nlogger.Trace(" cache is used.");
                }

            }
            return await Task.FromResult<TResult>(cacheEntry);
        }

        /// <summary>
        /// Builds entry options: sliding expiration <paramref name="slidingOffset"/>,
        /// absolute expiration <paramref name="relativeOffset"/> from now.
        /// </summary>
        MemoryCacheEntryOptions createMemoryCacheEntryOptions(TimeSpan slidingOffset, TimeSpan relativeOffset)
        {
            return new MemoryCacheEntryOptions()

                // Each entry counts as 1 toward the cache's SizeLimit —
                // an entry count, not an actual memory size.
                .SetSize(1)

                // Priority on removing when reaching size limit (memory pressure).
                .SetPriority(CacheItemPriority.High)

                // Keep in cache for this amount of time; reset on each access.
                .SetSlidingExpiration(slidingOffset)

                // Remove from cache after this time, regardless of sliding expiration.
                .SetAbsoluteExpiration(relativeOffset);
        }

        /// <summary>
        /// Stores both the result and its parameter model under the same policy
        /// (2 s sliding, 5 s absolute) so they age together.
        /// </summary>
        void createMemoryCache(object key, TResult cacheEntry, object paramKey, TParameter param)
        {
            var cacheEntryOptions = createMemoryCacheEntryOptions(
                TimeSpan.FromSeconds(2)
                , TimeSpan.FromSeconds(5));

            // Save data in cache.
            _cache.Set(key, cacheEntry, cacheEntryOptions);

            // Save param in cache.
            _cache.Set(paramKey, param, cacheEntryOptions);
        }

        /// <summary>Diagnostic helper: logs whatever is cached under <paramref name="key"/>.</summary>
        void checkCacheEntry<T>(object key, string name)
        {
            _cache.TryGetValue(key, out T value);
            _nlogger.Fatal("Key: {0}, Name: {1}, Value: {2}", key, name, value);
        }
    }
    

    nlogger is just nLog object to trace MemoryCacheWithPolicy behavior. I re-create the memory cache if request object (RequestQuery requestQuery) is changed through the delegate (Func<TParameter, TResult> createCacheData) or re-create when sliding or absolute time reached their limit. Note that everything is async too ;)

    0 讨论(0)
提交回复
热议问题