@@ -580,12 +580,14 @@ def lru_cache(maxsize=128, typed=False):
         # Negative maxsize is treated as 0
         if maxsize < 0:
             maxsize = 0
+
     elif callable(maxsize) and isinstance(typed, bool):
         # The user_function was passed in directly via the maxsize argument
         user_function, maxsize = maxsize, 128
         wrapper = _lru_cache_wrapper(user_function, maxsize, typed, _CacheInfo)
         wrapper.cache_parameters = lambda : {'maxsize': maxsize, 'typed': typed}
         return update_wrapper(wrapper, user_function)
+
     elif maxsize is not None:
         raise TypeError(
             'Expected first argument to be an integer, a callable, or None')
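For context, this hunk is the argument-handling prologue of `lru_cache`: a negative `maxsize` is clamped to 0, and a bare `@lru_cache` (no parentheses, Python 3.8+) arrives with the decorated function bound to the `maxsize` parameter, which the `callable(maxsize)` branch detects and restores the default of 128. A small usage sketch of both paths (assuming Python 3.9+ for `cache_parameters()`):

```python
from functools import lru_cache

@lru_cache                      # bare form: fib itself is passed in via `maxsize`
def fib(n):
    return n if n < 2 else fib(n - 1) + fib(n - 2)

@lru_cache(maxsize=-5)          # negative maxsize is treated as 0 (no caching)
def uncached(x):
    return x * 2

print(fib.cache_parameters())       # {'maxsize': 128, 'typed': False}
print(uncached.cache_parameters())  # {'maxsize': 0, 'typed': False}
```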
@@ -617,6 +619,7 @@ def _lru_cache_wrapper(user_function, maxsize, typed, _CacheInfo):
         def wrapper(*args, **kwds):
             # No caching -- just a statistics update
             nonlocal misses
+
             misses += 1
             result = user_function(*args, **kwds)
             return result
@@ -626,6 +629,7 @@ def wrapper(*args, **kwds):
         def wrapper(*args, **kwds):
             # Simple caching without ordering or size limit
             nonlocal hits, misses
+
             key = make_key(args, kwds, typed)
             result = cache_get(key, sentinel)
             if result is not sentinel:
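The `maxsize is None` branch above is essentially an unbounded memo dict. As an illustrative sketch only (not the stdlib code itself, and without `make_key`, statistics, or keyword support), the same idea looks like this:

```python
def simple_cache(user_function):
    # Sketch of the unbounded branch: a plain dict plus a sentinel object,
    # so a cached result of None is distinguishable from a cache miss.
    sentinel = object()
    cache = {}
    cache_get = cache.get        # bound method lookup, as in the stdlib code

    def wrapper(*args):
        result = cache_get(args, sentinel)
        if result is not sentinel:
            return result
        result = user_function(*args)
        cache[args] = result
        return result

    return wrapper
```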
@@ -641,7 +645,9 @@ def wrapper(*args, **kwds):
         def wrapper(*args, **kwds):
             # Size limited caching that tracks accesses by recency
             nonlocal root, hits, misses, full
+
             key = make_key(args, kwds, typed)
+
             with lock:
                 link = cache_get(key)
                 if link is not None:
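The `link` values fetched from `cache` in this branch are four-element lists indexed by the module constants `PREV, NEXT, KEY, RESULT = 0, 1, 2, 3`, arranged as a circular doubly-linked list anchored at `root` (oldest entry right after `root`, newest right before it). A minimal sketch of the move-to-front operation on that structure, for illustration only (the helper name `move_to_front` is hypothetical, not part of functools):

```python
PREV, NEXT, KEY, RESULT = 0, 1, 2, 3     # field indices, as in functools

root = []                                # empty circular queue:
root[:] = [root, root, None, None]       # root links to itself in both directions

def move_to_front(root, link):
    # Unlink `link` from wherever it currently sits...
    link_prev, link_next = link[PREV], link[NEXT]
    link_prev[NEXT] = link_next
    link_next[PREV] = link_prev
    # ...and splice it back in just before root, the most-recently-used slot.
    last = root[PREV]
    last[NEXT] = root[PREV] = link
    link[PREV] = last
    link[NEXT] = root
```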
@@ -656,19 +662,23 @@ def wrapper(*args, **kwds):
                     hits += 1
                     return result
                 misses += 1
+
             result = user_function(*args, **kwds)
+
             with lock:
                 if key in cache:
                     # Getting here means that this same key was added to the
                     # cache while the lock was released. Since the link
                     # update is already done, we need only return the
                     # computed result and update the count of misses.
                     pass
+
                 elif full:
                     # Use the old root to store the new key and result.
                     oldroot = root
                     oldroot[KEY] = key
                     oldroot[RESULT] = result
+
                     # Empty the oldest link and make it the new root.
                     # Keep a reference to the old key and old result to
                     # prevent their ref counts from going to zero during the
@@ -679,20 +689,25 @@ def wrapper(*args, **kwds):
                     oldkey = root[KEY]
                     oldresult = root[RESULT]
                     root[KEY] = root[RESULT] = None
+
                     # Now update the cache dictionary.
                     del cache[oldkey]
+
                     # Save the potentially reentrant cache[key] assignment
                     # for last, after the root and links have been put in
                     # a consistent state.
                     cache[key] = oldroot
+
                 else:
                     # Put result in a new link at the front of the queue.
                     last = root[PREV]
                     link = [last, root, key, result]
                     last[NEXT] = root[PREV] = cache[key] = link
+
                     # Use the cache_len bound method instead of the len() function
                     # which could potentially be wrapped in an lru_cache itself.
                     full = (cache_len() >= maxsize)
+
             return result

     def cache_info():
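Observable effect of the size-limited branch above, for reference: once the cache is full, inserting a new key reuses the old root and evicts the least recently used entry, where a cache hit counts as a use. For example:

```python
from functools import lru_cache

@lru_cache(maxsize=2)
def square(n):
    return n * n

square(1)                     # miss
square(2)                     # miss -- cache is now full
square(1)                     # hit  -- 1 becomes most recently used
square(3)                     # miss -- evicts 2, the least recently used
print(square.cache_info())    # CacheInfo(hits=1, misses=3, maxsize=2, currsize=2)
```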
@@ -703,6 +718,7 @@ def cache_info():
     def cache_clear():
         """Clear the cache and cache statistics"""
         nonlocal hits, misses, full
+
         with lock:
             cache.clear()
             root[:] = [root, root, None, None]
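Continuing the `square` example above, `cache_clear()` resets both the linked list (via the `root[:] = [root, root, None, None]` assignment in this hunk) and the statistics:

```python
square.cache_clear()
print(square.cache_info())    # CacheInfo(hits=0, misses=0, maxsize=2, currsize=0)
```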