https://github.com/python/cpython/commit/4c12a2db15639fe1b28f1f0f807a18fb7b71d851
commit: 4c12a2db15639fe1b28f1f0f807a18fb7b71d851
branch: main
author: Tomasz Pytel <tompy...@gmail.com>
committer: vstinner <vstin...@python.org>
date: 2025-04-14T18:31:19+02:00
summary:

gh-131757: allow lru_cache functions to execute concurrently (#131758)

files:
A Misc/NEWS.d/next/Library/2025-03-26-10-56-22.gh-issue-131757.pFRdmN.rst
M Include/internal/pycore_pyatomic_ft_wrappers.h
M Modules/_functoolsmodule.c

diff --git a/Include/internal/pycore_pyatomic_ft_wrappers.h b/Include/internal/pycore_pyatomic_ft_wrappers.h
index d755d03a5fa190..3e41e2fd1569ca 100644
--- a/Include/internal/pycore_pyatomic_ft_wrappers.h
+++ b/Include/internal/pycore_pyatomic_ft_wrappers.h
@@ -109,6 +109,8 @@ extern "C" {
     _Py_atomic_store_ullong_relaxed(&value, new_value)
 #define FT_ATOMIC_LOAD_ULLONG_RELAXED(value) \
     _Py_atomic_load_ullong_relaxed(&value)
+#define FT_ATOMIC_ADD_SSIZE(value, new_value) \
+    (void)_Py_atomic_add_ssize(&value, new_value)
 
 #else
 #define FT_ATOMIC_LOAD_PTR(value) value
@@ -156,6 +158,7 @@ extern "C" {
 #define FT_ATOMIC_STORE_LLONG_RELAXED(value, new_value) value = new_value
 #define FT_ATOMIC_LOAD_ULLONG_RELAXED(value) value
 #define FT_ATOMIC_STORE_ULLONG_RELAXED(value, new_value) value = new_value
+#define FT_ATOMIC_ADD_SSIZE(value, new_value) (void)(value += new_value)
 
 #endif
 
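For context: the new FT_ATOMIC_ADD_SSIZE wrapper above gives the free-threaded build an atomic increment while keeping a plain "+=" on the default build. A rough standalone sketch of the same idea in plain C11 atomics, not CPython code (MY_FT_BUILD, bump_misses() and load_misses() are invented for this illustration; the real macro expands to _Py_atomic_add_ssize):

    /* Minimal sketch of what FT_ATOMIC_ADD_SSIZE is for: a counter that may
       be bumped from several threads at once on the free-threaded build, and
       that degrades to an ordinary addition on the default (GIL) build. */
    #include <stdio.h>

    #ifdef MY_FT_BUILD                 /* stands in for Py_GIL_DISABLED */
    #include <stdatomic.h>
    static atomic_long misses;                       /* free-threaded flavour */
    static void bump_misses(void) { atomic_fetch_add(&misses, 1); }
    static long load_misses(void) { return atomic_load(&misses); }
    #else
    static long misses;                              /* default (GIL) flavour */
    static void bump_misses(void) { misses += 1; }   /* serialized by the GIL */
    static long load_misses(void) { return misses; }
    #endif

    int main(void)
    {
        bump_misses();                               /* e.g. one cache miss */
        printf("misses = %ld\n", load_misses());
        return 0;
    }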
diff --git a/Misc/NEWS.d/next/Library/2025-03-26-10-56-22.gh-issue-131757.pFRdmN.rst b/Misc/NEWS.d/next/Library/2025-03-26-10-56-22.gh-issue-131757.pFRdmN.rst
new file mode 100644
index 00000000000000..89f71ca113aa3d
--- /dev/null
+++ b/Misc/NEWS.d/next/Library/2025-03-26-10-56-22.gh-issue-131757.pFRdmN.rst
@@ -0,0 +1 @@
+Make :func:`functools.lru_cache` call the cached function unlocked to allow concurrency.
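The _functoolsmodule.c changes below restructure the bounded cache wrapper so that the per-object critical section is taken twice: once for the lookup and once for the cache update, with the user function called in between while no lock is held. A rough standalone sketch of that pattern, not CPython code (the one-slot cache, slow_compute() and cached_call() are invented stand-ins; build with -pthread):

    /* Sketch of the locking pattern introduced below: look up under the lock,
       call the user function unlocked, then re-take the lock to store the
       result.  Because the call is unlocked, two threads can miss on the same
       key and both compute a value, so the update step re-checks before
       inserting, mirroring what bounded_lru_cache_update_lock_held() does. */
    #include <pthread.h>
    #include <stdatomic.h>
    #include <stdio.h>

    static pthread_mutex_t lock = PTHREAD_MUTEX_INITIALIZER;
    static int have_entry, entry_value;    /* a one-slot stand-in "cache" */
    static atomic_int calls;               /* how often the user function ran */

    static int slow_compute(int key)       /* stands in for self->func */
    {
        atomic_fetch_add(&calls, 1);
        return key * key;
    }

    static int cached_call(int key)
    {
        int value, hit;

        pthread_mutex_lock(&lock);         /* "get" step, lock held */
        hit = have_entry;
        if (hit)
            value = entry_value;
        pthread_mutex_unlock(&lock);
        if (hit)
            return value;

        value = slow_compute(key);         /* UNLOCKED: other threads may use
                                              the cache, or race us, meanwhile */

        pthread_mutex_lock(&lock);         /* "update" step, lock held */
        if (!have_entry) {                 /* re-check: another thread may have
                                              stored the same key already */
            have_entry = 1;
            entry_value = value;
        }
        pthread_mutex_unlock(&lock);
        return value;                      /* our own result either way */
    }

    static void *worker(void *arg)
    {
        (void)arg;
        printf("cached_call(7) = %d\n", cached_call(7));
        return NULL;
    }

    int main(void)
    {
        pthread_t t1, t2;
        pthread_create(&t1, NULL, worker, NULL);
        pthread_create(&t2, NULL, worker, NULL);
        pthread_join(t1, NULL);
        pthread_join(t2, NULL);
        printf("user function ran %d time(s)\n", atomic_load(&calls)); /* 1 or 2 */
        return 0;
    }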
diff --git a/Modules/_functoolsmodule.c b/Modules/_functoolsmodule.c
index c84779268eb12e..e6c454faf4b16f 100644
--- a/Modules/_functoolsmodule.c
+++ b/Modules/_functoolsmodule.c
@@ -4,6 +4,7 @@
 #include "pycore_long.h"          // _PyLong_GetZero()
 #include "pycore_moduleobject.h"  // _PyModule_GetState()
 #include "pycore_object.h"        // _PyObject_GC_TRACK
+#include "pycore_pyatomic_ft_wrappers.h"
 #include "pycore_pystate.h"       // _PyThreadState_GET()
 #include "pycore_tuple.h"         // _PyTuple_ITEMS()
 
@@ -40,7 +41,6 @@ get_functools_state(PyObject *module)
     return (_functools_state *)state;
 }
 
-
 /* partial object **********************************************************/
 
 
@@ -1016,7 +1016,7 @@ _functools_reduce_impl(PyObject *module, PyObject *func, PyObject *seq,
 
     if (result == NULL)
         PyErr_SetString(PyExc_TypeError,
-                   "reduce() of empty iterable with no initial value");
+                        "reduce() of empty iterable with no initial value");
 
     Py_DECREF(it);
     return result;
@@ -1173,7 +1173,7 @@ uncached_lru_cache_wrapper(lru_cache_object *self, PyObject *args, PyObject *kwd
 {
     PyObject *result;
 
-    self->misses++;
+    FT_ATOMIC_ADD_SSIZE(self->misses, 1);
     result = PyObject_Call(self->func, args, kwds);
     if (!result)
         return NULL;
@@ -1193,18 +1193,17 @@ infinite_lru_cache_wrapper(lru_cache_object *self, PyObject *args, PyObject *kwd
         Py_DECREF(key);
         return NULL;
     }
-    result = _PyDict_GetItem_KnownHash(self->cache, key, hash);
-    if (result) {
-        Py_INCREF(result);
-        self->hits++;
+    int res = _PyDict_GetItemRef_KnownHash((PyDictObject *)self->cache, key, hash, &result);
+    if (res > 0) {
+        FT_ATOMIC_ADD_SSIZE(self->hits, 1);
         Py_DECREF(key);
         return result;
     }
-    if (PyErr_Occurred()) {
+    if (res < 0) {
         Py_DECREF(key);
         return NULL;
     }
-    self->misses++;
+    FT_ATOMIC_ADD_SSIZE(self->misses, 1);
     result = PyObject_Call(self->func, args, kwds);
     if (!result) {
         Py_DECREF(key);
@@ -1279,50 +1278,65 @@ lru_cache_prepend_link(lru_cache_object *self, lru_list_elem *link)
    so that we know the cache is a consistent state.
  */
 
-static PyObject *
-bounded_lru_cache_wrapper(lru_cache_object *self, PyObject *args, PyObject *kwds)
+static int
+bounded_lru_cache_get_lock_held(lru_cache_object *self, PyObject *args, PyObject *kwds,
+                                PyObject **result, PyObject **key, Py_hash_t *hash)
 {
+    _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED(self);
     lru_list_elem *link;
-    PyObject *key, *result, *testresult;
-    Py_hash_t hash;
 
-    key = lru_cache_make_key(self->kwd_mark, args, kwds, self->typed);
-    if (!key)
-        return NULL;
-    hash = PyObject_Hash(key);
-    if (hash == -1) {
-        Py_DECREF(key);
-        return NULL;
+    PyObject *key_ = *key = lru_cache_make_key(self->kwd_mark, args, kwds, self->typed);
+    if (!key_)
+        return -1;
+    Py_hash_t hash_ = *hash = PyObject_Hash(key_);
+    if (hash_ == -1) {
+        Py_DECREF(key_);  /* dead reference left in *key, is not used */
+        return -1;
     }
-    link  = (lru_list_elem *)_PyDict_GetItem_KnownHash(self->cache, key, hash);
-    if (link != NULL) {
+    int res = _PyDict_GetItemRef_KnownHash_LockHeld((PyDictObject *)self->cache, key_, hash_,
+                                                    (PyObject **)&link);
+    if (res > 0) {
         lru_cache_extract_link(link);
         lru_cache_append_link(self, link);
-        result = link->result;
-        self->hits++;
-        Py_INCREF(result);
-        Py_DECREF(key);
-        return result;
+        *result = link->result;
+        FT_ATOMIC_ADD_SSIZE(self->hits, 1);
+        Py_INCREF(link->result);
+        Py_DECREF(link);
+        Py_DECREF(key_);
+        return 1;
     }
-    if (PyErr_Occurred()) {
-        Py_DECREF(key);
-        return NULL;
+    if (res < 0) {
+        Py_DECREF(key_);
+        return -1;
     }
-    self->misses++;
-    result = PyObject_Call(self->func, args, kwds);
+    FT_ATOMIC_ADD_SSIZE(self->misses, 1);
+    return 0;
+}
+
+static PyObject *
+bounded_lru_cache_update_lock_held(lru_cache_object *self,
+                                   PyObject *result, PyObject *key, Py_hash_t hash)
+{
+    _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED(self);
+    lru_list_elem *link;
+    PyObject *testresult;
+    int res;
+
     if (!result) {
         Py_DECREF(key);
         return NULL;
     }
-    testresult = _PyDict_GetItem_KnownHash(self->cache, key, hash);
-    if (testresult != NULL) {
+    res = _PyDict_GetItemRef_KnownHash_LockHeld((PyDictObject *)self->cache, key, hash,
+                                                &testresult);
+    if (res > 0) {
         /* Getting here means that this same key was added to the cache
            during the PyObject_Call().  Since the link update is already
            done, we need only return the computed result. */
+        Py_DECREF(testresult);
         Py_DECREF(key);
         return result;
     }
-    if (PyErr_Occurred()) {
+    if (res < 0) {
         /* This is an unusual case since this same lookup
            did not previously trigger an error during lookup.
            Treat it the same as an error in user function
@@ -1386,8 +1400,8 @@ bounded_lru_cache_wrapper(lru_cache_object *self, PyObject *args, PyObject *kwds
        The cache dict holds one reference to the link.
        We created one other reference when the link was created.
        The linked list only has borrowed references. */
-    int res = _PyDict_Pop_KnownHash((PyDictObject*)self->cache, link->key,
-                                    link->hash, &popresult);
+    res = _PyDict_Pop_KnownHash((PyDictObject*)self->cache, link->key,
+                                link->hash, &popresult);
     if (res < 0) {
         /* An error arose while trying to remove the oldest key (the one
            being evicted) from the cache.  We restore the link to its
@@ -1445,6 +1459,37 @@ bounded_lru_cache_wrapper(lru_cache_object *self, PyObject *args, PyObject *kwds
     return result;
 }
 
+static PyObject *
+bounded_lru_cache_wrapper(lru_cache_object *self, PyObject *args, PyObject *kwds)
+{
+    PyObject *key, *result;
+    Py_hash_t hash;
+    int res;
+
+    Py_BEGIN_CRITICAL_SECTION(self);
+    res = bounded_lru_cache_get_lock_held(self, args, kwds, &result, &key, &hash);
+    Py_END_CRITICAL_SECTION();
+
+    if (res < 0) {
+        return NULL;
+    }
+    if (res > 0) {
+        return result;
+    }
+
+    result = PyObject_Call(self->func, args, kwds);
+
+    Py_BEGIN_CRITICAL_SECTION(self);
+    /* Note:  key will be stolen in the below function, and
+       result may be stolen or sometimes re-returned as a passthrough.
+       Treat both as being stolen.
+     */
+    result = bounded_lru_cache_update_lock_held(self, result, key, hash);
+    Py_END_CRITICAL_SECTION();
+
+    return result;
+}
+
 static PyObject *
 lru_cache_new(PyTypeObject *type, PyObject *args, PyObject *kw)
 {
@@ -1577,9 +1622,7 @@ lru_cache_call(PyObject *op, PyObject *args, PyObject *kwds)
 {
     lru_cache_object *self = lru_cache_object_CAST(op);
     PyObject *result;
-    Py_BEGIN_CRITICAL_SECTION(self);
     result = self->wrapper(self, args, kwds);
-    Py_END_CRITICAL_SECTION();
     return result;
 }
 
@@ -1606,11 +1649,15 @@ _functools__lru_cache_wrapper_cache_info_impl(PyObject *self)
     lru_cache_object *_self = (lru_cache_object *) self;
     if (_self->maxsize == -1) {
         return PyObject_CallFunction(_self->cache_info_type, "nnOn",
-                                     _self->hits, _self->misses, Py_None,
+                                     FT_ATOMIC_LOAD_SSIZE_RELAXED(_self->hits),
+                                     FT_ATOMIC_LOAD_SSIZE_RELAXED(_self->misses),
+                                     Py_None,
                                      PyDict_GET_SIZE(_self->cache));
     }
     return PyObject_CallFunction(_self->cache_info_type, "nnnn",
-                                 _self->hits, _self->misses, _self->maxsize,
+                                 FT_ATOMIC_LOAD_SSIZE_RELAXED(_self->hits),
+                                 FT_ATOMIC_LOAD_SSIZE_RELAXED(_self->misses),
+                                 _self->maxsize,
                                  PyDict_GET_SIZE(_self->cache));
 }
 
@@ -1627,7 +1674,8 @@ _functools__lru_cache_wrapper_cache_clear_impl(PyObject *self)
 {
     lru_cache_object *_self = (lru_cache_object *) self;
     lru_list_elem *list = lru_cache_unlink_list(_self);
-    _self->hits = _self->misses = 0;
+    FT_ATOMIC_STORE_SSIZE_RELAXED(_self->hits, 0);
+    FT_ATOMIC_STORE_SSIZE_RELAXED(_self->misses, 0);
     PyDict_Clear(_self->cache);
     lru_cache_clear_list(list);
     Py_RETURN_NONE;

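As a side note on the last two hunks above: cache_info() and cache_clear() read and reset the hit/miss counters with relaxed atomic loads and stores, which is sufficient because they only need a point-in-time snapshot of the statistics, not synchronization with the cache contents. A rough standalone sketch of that idea in plain C11 atomics, not CPython code (the hits/misses counters and report_and_clear() are invented for illustration):

    /* Counters bumped with atomic adds can be read and reset with relaxed
       operations when all the caller wants is an approximate snapshot. */
    #include <stdatomic.h>
    #include <stdio.h>

    static atomic_long hits, misses;

    static void record_hit(void)  { atomic_fetch_add(&hits, 1); }
    static void record_miss(void) { atomic_fetch_add(&misses, 1); }

    static void report_and_clear(void)
    {
        /* relaxed loads/stores: a point-in-time snapshot is good enough */
        printf("hits=%ld misses=%ld\n",
               atomic_load_explicit(&hits, memory_order_relaxed),
               atomic_load_explicit(&misses, memory_order_relaxed));
        atomic_store_explicit(&hits, 0, memory_order_relaxed);
        atomic_store_explicit(&misses, 0, memory_order_relaxed);
    }

    int main(void)
    {
        record_miss();
        record_hit();
        record_hit();
        report_and_clear();               /* prints "hits=2 misses=1" */
        return 0;
    }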