#include "pycore_long.h" // _PyLong_GetZero()
#include "pycore_moduleobject.h" // _PyModule_GetState()
#include "pycore_object.h" // _PyObject_GC_TRACK
+#include "pycore_pyatomic_ft_wrappers.h"
#include "pycore_pystate.h" // _PyThreadState_GET()
#include "pycore_tuple.h" // _PyTuple_ITEMS()
return (_functools_state *)state;
}
-
/* partial object **********************************************************/
if (result == NULL)
PyErr_SetString(PyExc_TypeError,
- "reduce() of empty iterable with no initial value");
+ "reduce() of empty iterable with no initial value");
Py_DECREF(it);
return result;
{
PyObject *result;
- self->misses++;
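+    /* This wrapper is no longer called with the per-object lock held, so
+       the counter is bumped with an atomic add on free-threaded builds
+       (a plain increment on the default build). */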
+ FT_ATOMIC_ADD_SSIZE(self->misses, 1);
result = PyObject_Call(self->func, args, kwds);
if (!result)
return NULL;
Py_DECREF(key);
return NULL;
}
- result = _PyDict_GetItem_KnownHash(self->cache, key, hash);
- if (result) {
- Py_INCREF(result);
- self->hits++;
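+    /* _PyDict_GetItemRef_KnownHash() reports found/missing/error as
+       >0/0/<0 and hands back a strong reference, so the cached value stays
+       alive even though this wrapper no longer holds the per-object lock. */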
+ int res = _PyDict_GetItemRef_KnownHash((PyDictObject *)self->cache, key, hash, &result);
+ if (res > 0) {
+ FT_ATOMIC_ADD_SSIZE(self->hits, 1);
Py_DECREF(key);
return result;
}
- if (PyErr_Occurred()) {
+ if (res < 0) {
Py_DECREF(key);
return NULL;
}
- self->misses++;
+ FT_ATOMIC_ADD_SSIZE(self->misses, 1);
result = PyObject_Call(self->func, args, kwds);
if (!result) {
Py_DECREF(key);
   so that we know the cache is in a consistent state.
*/
-static PyObject *
-bounded_lru_cache_wrapper(lru_cache_object *self, PyObject *args, PyObject *kwds)
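+/* Lookup phase of the bounded cache, called with the per-object lock held.
+   Returns 1 on a hit (a new reference to the cached value is stored in
+   *result), 0 on a miss (*key and *hash are filled in for the update phase
+   and ownership of *key passes to the caller), and -1 on error (any
+   reference left in *key is dead and must not be used). */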
+static int
+bounded_lru_cache_get_lock_held(lru_cache_object *self, PyObject *args, PyObject *kwds,
+ PyObject **result, PyObject **key, Py_hash_t *hash)
{
+ _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED(self);
lru_list_elem *link;
- PyObject *key, *result, *testresult;
- Py_hash_t hash;
- key = lru_cache_make_key(self->kwd_mark, args, kwds, self->typed);
- if (!key)
- return NULL;
- hash = PyObject_Hash(key);
- if (hash == -1) {
- Py_DECREF(key);
- return NULL;
+ PyObject *key_ = *key = lru_cache_make_key(self->kwd_mark, args, kwds, self->typed);
+ if (!key_)
+ return -1;
+ Py_hash_t hash_ = *hash = PyObject_Hash(key_);
+ if (hash_ == -1) {
+        Py_DECREF(key_);  /* *key is left as a dead reference; callers must not use it */
+ return -1;
}
- link = (lru_list_elem *)_PyDict_GetItem_KnownHash(self->cache, key, hash);
- if (link != NULL) {
+ int res = _PyDict_GetItemRef_KnownHash_LockHeld((PyDictObject *)self->cache, key_, hash_,
+ (PyObject **)&link);
+ if (res > 0) {
lru_cache_extract_link(link);
lru_cache_append_link(self, link);
- result = link->result;
- self->hits++;
- Py_INCREF(result);
- Py_DECREF(key);
- return result;
+ *result = link->result;
+ FT_ATOMIC_ADD_SSIZE(self->hits, 1);
+ Py_INCREF(link->result);
+ Py_DECREF(link);
+ Py_DECREF(key_);
+ return 1;
}
- if (PyErr_Occurred()) {
- Py_DECREF(key);
- return NULL;
+ if (res < 0) {
+ Py_DECREF(key_);
+ return -1;
}
- self->misses++;
- result = PyObject_Call(self->func, args, kwds);
+ FT_ATOMIC_ADD_SSIZE(self->misses, 1);
+ return 0;
+}
+
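+/* Update phase of the bounded cache, called with the per-object lock
+   reacquired after the user function ran without it.  Inserts result under
+   key unless a concurrent or reentrant call already populated the entry.
+   Steals the reference to key and returns the value to hand back to the
+   caller, or NULL on error. */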
+static PyObject *
+bounded_lru_cache_update_lock_held(lru_cache_object *self,
+ PyObject *result, PyObject *key, Py_hash_t hash)
+{
+ _Py_CRITICAL_SECTION_ASSERT_OBJECT_LOCKED(self);
+ lru_list_elem *link;
+ PyObject *testresult;
+ int res;
+
if (!result) {
Py_DECREF(key);
return NULL;
}
- testresult = _PyDict_GetItem_KnownHash(self->cache, key, hash);
- if (testresult != NULL) {
+ res = _PyDict_GetItemRef_KnownHash_LockHeld((PyDictObject *)self->cache, key, hash,
+ &testresult);
+ if (res > 0) {
/* Getting here means that this same key was added to the cache
during the PyObject_Call(). Since the link update is already
done, we need only return the computed result. */
+ Py_DECREF(testresult);
Py_DECREF(key);
return result;
}
- if (PyErr_Occurred()) {
+ if (res < 0) {
/* This is an unusual case since this same lookup
did not previously trigger an error during lookup.
Treat it the same as an error in user function
The cache dict holds one reference to the link.
We created one other reference when the link was created.
The linked list only has borrowed references. */
- int res = _PyDict_Pop_KnownHash((PyDictObject*)self->cache, link->key,
- link->hash, &popresult);
+ res = _PyDict_Pop_KnownHash((PyDictObject*)self->cache, link->key,
+ link->hash, &popresult);
if (res < 0) {
/* An error arose while trying to remove the oldest key (the one
being evicted) from the cache. We restore the link to its
return result;
}
+static PyObject *
+bounded_lru_cache_wrapper(lru_cache_object *self, PyObject *args, PyObject *kwds)
+{
+ PyObject *key, *result;
+ Py_hash_t hash;
+ int res;
+
+ Py_BEGIN_CRITICAL_SECTION(self);
+ res = bounded_lru_cache_get_lock_held(self, args, kwds, &result, &key, &hash);
+ Py_END_CRITICAL_SECTION();
+
+ if (res < 0) {
+ return NULL;
+ }
+ if (res > 0) {
+ return result;
+ }
+
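+    /* The user function runs without the lock held: it may execute
+       arbitrary code, including reentrant calls into this cache, while the
+       cache itself is only mutated inside the critical sections. */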
+ result = PyObject_Call(self->func, args, kwds);
+
+ Py_BEGIN_CRITICAL_SECTION(self);
+    /* Note: bounded_lru_cache_update_lock_held() steals the reference to
+       key, and either steals result or returns it back as a passthrough.
+       Treat both references as stolen. */
+ result = bounded_lru_cache_update_lock_held(self, result, key, hash);
+ Py_END_CRITICAL_SECTION();
+
+ return result;
+}
+
static PyObject *
lru_cache_new(PyTypeObject *type, PyObject *args, PyObject *kw)
{
{
lru_cache_object *self = lru_cache_object_CAST(op);
PyObject *result;
- Py_BEGIN_CRITICAL_SECTION(self);
result = self->wrapper(self, args, kwds);
- Py_END_CRITICAL_SECTION();
return result;
}
lru_cache_object *_self = (lru_cache_object *) self;
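+    /* The counters are read with relaxed atomic loads; under free-threading
+       the reported hits and misses may lag concurrent updates. */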
if (_self->maxsize == -1) {
return PyObject_CallFunction(_self->cache_info_type, "nnOn",
- _self->hits, _self->misses, Py_None,
+ FT_ATOMIC_LOAD_SSIZE_RELAXED(_self->hits),
+ FT_ATOMIC_LOAD_SSIZE_RELAXED(_self->misses),
+ Py_None,
PyDict_GET_SIZE(_self->cache));
}
return PyObject_CallFunction(_self->cache_info_type, "nnnn",
- _self->hits, _self->misses, _self->maxsize,
+ FT_ATOMIC_LOAD_SSIZE_RELAXED(_self->hits),
+ FT_ATOMIC_LOAD_SSIZE_RELAXED(_self->misses),
+ _self->maxsize,
PyDict_GET_SIZE(_self->cache));
}
{
lru_cache_object *_self = (lru_cache_object *) self;
lru_list_elem *list = lru_cache_unlink_list(_self);
- _self->hits = _self->misses = 0;
+ FT_ATOMIC_STORE_SSIZE_RELAXED(_self->hits, 0);
+ FT_ATOMIC_STORE_SSIZE_RELAXED(_self->misses, 0);
PyDict_Clear(_self->cache);
lru_cache_clear_list(list);
Py_RETURN_NONE;