git.ipfire.org Git - thirdparty/Python/cpython.git/commitdiff
gh-93117: Remove too large sqlite3 bigmemtest (#93154)
author: Erlend Egeberg Aasland <erlend.aasland@protonmail.com>
Wed, 25 May 2022 12:53:26 +0000 (14:53 +0200)
committer: GitHub <noreply@github.com>
Wed, 25 May 2022 12:53:26 +0000 (14:53 +0200)
Lib/test/test_sqlite3/test_dbapi.py

index 840a401b6b3ee2b139fb6e8e0dda4f997a66fea5..1fa02db3b3af4127792654da7928902c2abd6356 100644 (file)
@@ -29,7 +29,7 @@ import threading
 import unittest
 import urllib.parse
 
-from test.support import SHORT_TIMEOUT, bigmemtest, check_disallow_instantiation
+from test.support import SHORT_TIMEOUT, check_disallow_instantiation
 from test.support import threading_helper
 from _testcapi import INT_MAX, ULLONG_MAX
 from os import SEEK_SET, SEEK_CUR, SEEK_END
@@ -626,13 +626,6 @@ class SerializeTests(unittest.TestCase):
                 # deserialized database.
                 cx.execute("create table fail(f)")
 
-    @unittest.skipUnless(sys.maxsize > 2**32, 'requires 64bit platform')
-    @bigmemtest(size=2**63, memuse=3, dry_run=False)
-    def test_deserialize_too_much_data_64bit(self):
-        with memory_database() as cx:
-            with self.assertRaisesRegex(OverflowError, "'data' is too large"):
-                cx.deserialize(b"b" * size)
-
 
 class OpenTests(unittest.TestCase):
     _sql = "create table test(id integer)"