git.ipfire.org Git - thirdparty/Python/cpython.git/commitdiff
gh-131525: Remove `_HashedSeq` wrapper from `lru_cache` (gh-131922)
author Lukas Geiger <lukas.geiger94@gmail.com>
Mon, 31 Mar 2025 13:23:41 +0000 (14:23 +0100)
committer GitHub <noreply@github.com>
Mon, 31 Mar 2025 13:23:41 +0000 (08:23 -0500)
Lib/functools.py

index e0e45bc336c1efb4441fe415654c342a5c99dfc0..714070c6ac94609ed162e4ce357de8788fb6a071 100644 (file)
@@ -516,22 +516,6 @@ def _unwrap_partialmethod(func):
 
 _CacheInfo = namedtuple("CacheInfo", ["hits", "misses", "maxsize", "currsize"])
 
-class _HashedSeq(list):
-    """ This class guarantees that hash() will be called no more than once
-        per element.  This is important because the lru_cache() will hash
-        the key multiple times on a cache miss.
-
-    """
-
-    __slots__ = 'hashvalue'
-
-    def __init__(self, tup, hash=hash):
-        self[:] = tup
-        self.hashvalue = hash(tup)
-
-    def __hash__(self):
-        return self.hashvalue
-
 def _make_key(args, kwds, typed,
              kwd_mark = (object(),),
              fasttypes = {int, str},
@@ -561,7 +545,7 @@ def _make_key(args, kwds, typed,
             key += tuple(type(v) for v in kwds.values())
     elif len(key) == 1 and type(key[0]) in fasttypes:
         return key[0]
-    return _HashedSeq(key)
+    return key
 
 def lru_cache(maxsize=128, typed=False):
     """Least-recently-used cache decorator.