https://github.com/python/cpython/commit/6fcac09401e336b25833dcef2610d498e73b27a1
commit: 6fcac09401e336b25833dcef2610d498e73b27a1
branch: main
author: Raymond Hettinger <[email protected]>
committer: rhettinger <[email protected]>
date: 2025-08-23T15:18:46Z
summary:

Improve readability by adding whitespace between code paragraphs (gh-138090)

Improve readability by adding whitespace between code paragraphs.

files:
M Lib/functools.py

diff --git a/Lib/functools.py b/Lib/functools.py
index 7f0eac3f650209..a92844ba7227b0 100644
--- a/Lib/functools.py
+++ b/Lib/functools.py
@@ -580,12 +580,14 @@ def lru_cache(maxsize=128, typed=False):
         # Negative maxsize is treated as 0
         if maxsize < 0:
             maxsize = 0
+
     elif callable(maxsize) and isinstance(typed, bool):
         # The user_function was passed in directly via the maxsize argument
         user_function, maxsize = maxsize, 128
         wrapper = _lru_cache_wrapper(user_function, maxsize, typed, _CacheInfo)
         wrapper.cache_parameters = lambda : {'maxsize': maxsize, 'typed': typed}
         return update_wrapper(wrapper, user_function)
+
     elif maxsize is not None:
         raise TypeError(
             'Expected first argument to be an integer, a callable, or None')
@@ -617,6 +619,7 @@ def _lru_cache_wrapper(user_function, maxsize, typed, _CacheInfo):
         def wrapper(*args, **kwds):
             # No caching -- just a statistics update
             nonlocal misses
+
             misses += 1
             result = user_function(*args, **kwds)
             return result
@@ -626,6 +629,7 @@ def wrapper(*args, **kwds):
         def wrapper(*args, **kwds):
             # Simple caching without ordering or size limit
             nonlocal hits, misses
+
             key = make_key(args, kwds, typed)
             result = cache_get(key, sentinel)
             if result is not sentinel:
@@ -641,7 +645,9 @@ def wrapper(*args, **kwds):
         def wrapper(*args, **kwds):
             # Size limited caching that tracks accesses by recency
             nonlocal root, hits, misses, full
+
             key = make_key(args, kwds, typed)
+
             with lock:
                 link = cache_get(key)
                 if link is not None:
@@ -656,7 +662,9 @@ def wrapper(*args, **kwds):
                     hits += 1
                     return result
                 misses += 1
+
             result = user_function(*args, **kwds)
+
             with lock:
                 if key in cache:
                     # Getting here means that this same key was added to the
@@ -664,11 +672,13 @@ def wrapper(*args, **kwds):
                     # update is already done, we need only return the
                     # computed result and update the count of misses.
                     pass
+
                 elif full:
                     # Use the old root to store the new key and result.
                     oldroot = root
                     oldroot[KEY] = key
                     oldroot[RESULT] = result
+
                     # Empty the oldest link and make it the new root.
                     # Keep a reference to the old key and old result to
                     # prevent their ref counts from going to zero during the
@@ -679,20 +689,25 @@ def wrapper(*args, **kwds):
                     oldkey = root[KEY]
                     oldresult = root[RESULT]
                     root[KEY] = root[RESULT] = None
+
                     # Now update the cache dictionary.
                     del cache[oldkey]
+
                     # Save the potentially reentrant cache[key] assignment
                     # for last, after the root and links have been put in
                     # a consistent state.
                     cache[key] = oldroot
+
                 else:
                     # Put result in a new link at the front of the queue.
                     last = root[PREV]
                     link = [last, root, key, result]
                     last[NEXT] = root[PREV] = cache[key] = link
+
                     # Use the cache_len bound method instead of the len() function
                     # which could potentially be wrapped in an lru_cache itself.
                     full = (cache_len() >= maxsize)
+
             return result
 
     def cache_info():
@@ -703,6 +718,7 @@ def cache_info():
     def cache_clear():
         """Clear the cache and cache statistics"""
         nonlocal hits, misses, full
+
         with lock:
             cache.clear()
             root[:] = [root, root, None, None]

_______________________________________________
Python-checkins mailing list -- [email protected]
To unsubscribe send an email to [email protected]
https://mail.python.org/mailman3//lists/python-checkins.python.org
Member address: [email protected]

Reply via email to