git.ipfire.org Git - thirdparty/Python/cpython.git/commitdiff
gh-133454: Mark tests with many threads that use much memory as bigmem (GH-133456)
author: Serhiy Storchaka <storchaka@gmail.com>
Thu, 8 May 2025 12:57:30 +0000 (15:57 +0300)
committer: GitHub <noreply@github.com>
Thu, 8 May 2025 12:57:30 +0000 (15:57 +0300)
Lib/test/test_asyncio/test_ssl.py
Lib/test/test_importlib/test_threaded_import.py
Lib/test/test_threadedtempfile.py
Lib/test/test_threading.py

index 986ecc2c5a964b235125c4f532e32505e47d34ca..3a7185cd8974d0ab382f4af8aeab995a9ee167c7 100644 (file)
@@ -195,9 +195,10 @@ class TestSSL(test_utils.TestCase):
         except (BrokenPipeError, ConnectionError):
             pass
 
-    def test_create_server_ssl_1(self):
+    @support.bigmemtest(size=25, memuse=90*2**20, dry_run=False)
+    def test_create_server_ssl_1(self, size):
         CNT = 0           # number of clients that were successful
-        TOTAL_CNT = 25    # total number of clients that test will create
+        TOTAL_CNT = size  # total number of clients that test will create
         TIMEOUT = support.LONG_TIMEOUT  # timeout for this test
 
         A_DATA = b'A' * 1024 * BUF_MULTIPLIER
@@ -1038,9 +1039,10 @@ class TestSSL(test_utils.TestCase):
 
         self.loop.run_until_complete(run_main())
 
-    def test_create_server_ssl_over_ssl(self):
+    @support.bigmemtest(size=25, memuse=90*2**20, dry_run=False)
+    def test_create_server_ssl_over_ssl(self, size):
         CNT = 0           # number of clients that were successful
-        TOTAL_CNT = 25    # total number of clients that test will create
+        TOTAL_CNT = size  # total number of clients that test will create
         TIMEOUT = support.LONG_TIMEOUT  # timeout for this test
 
         A_DATA = b'A' * 1024 * BUF_MULTIPLIER
index 9af1e4d505c66e02435e1b2e300674c42cf5e2ab..f78dc399720c8674cb44cfabe1b46232ff0eb8a2 100644 (file)
@@ -135,10 +135,12 @@ class ThreadedImportTests(unittest.TestCase):
             if verbose:
                 print("OK.")
 
-    def test_parallel_module_init(self):
+    @support.bigmemtest(size=50, memuse=76*2**20, dry_run=False)
+    def test_parallel_module_init(self, size):
         self.check_parallel_module_init()
 
-    def test_parallel_meta_path(self):
+    @support.bigmemtest(size=50, memuse=76*2**20, dry_run=False)
+    def test_parallel_meta_path(self, size):
         finder = Finder()
         sys.meta_path.insert(0, finder)
         try:
@@ -148,7 +150,8 @@ class ThreadedImportTests(unittest.TestCase):
         finally:
             sys.meta_path.remove(finder)
 
-    def test_parallel_path_hooks(self):
+    @support.bigmemtest(size=50, memuse=76*2**20, dry_run=False)
+    def test_parallel_path_hooks(self, size):
         # Here the Finder instance is only used to check concurrent calls
         # to path_hook().
         finder = Finder()
@@ -242,13 +245,15 @@ class ThreadedImportTests(unittest.TestCase):
             __import__(TESTFN)
         del sys.modules[TESTFN]
 
-    def test_concurrent_futures_circular_import(self):
+    @support.bigmemtest(size=1, memuse=1.8*2**30, dry_run=False)
+    def test_concurrent_futures_circular_import(self, size):
         # Regression test for bpo-43515
         fn = os.path.join(os.path.dirname(__file__),
                           'partial', 'cfimport.py')
         script_helper.assert_python_ok(fn)
 
-    def test_multiprocessing_pool_circular_import(self):
+    @support.bigmemtest(size=1, memuse=1.8*2**30, dry_run=False)
+    def test_multiprocessing_pool_circular_import(self, size):
         # Regression test for bpo-41567
         fn = os.path.join(os.path.dirname(__file__),
                           'partial', 'pool_in_threads.py')
index 420fc6ec8be3d8d9e7f6b85616fde730da445b24..acb427b0c78ae944a7625f4e06e2f85c38a79b4d 100644 (file)
@@ -15,6 +15,7 @@ provoking a 2.0 failure under Linux.
 
 import tempfile
 
+from test import support
 from test.support import threading_helper
 import unittest
 import io
@@ -49,7 +50,8 @@ class TempFileGreedy(threading.Thread):
 
 
 class ThreadedTempFileTest(unittest.TestCase):
-    def test_main(self):
+    @support.bigmemtest(size=NUM_THREADS, memuse=60*2**20, dry_run=False)
+    def test_main(self, size):
         threads = [TempFileGreedy() for i in range(NUM_THREADS)]
         with threading_helper.start_threads(threads, startEvent.set):
             pass
index 4ab38c2598b50a484051666c8818daa487201341..abe63c10c0ac7c906b2e4f2ec574833ec411a0f6 100644 (file)
@@ -530,7 +530,8 @@ class ThreadTests(BaseTestCase):
         finally:
             sys.setswitchinterval(old_interval)
 
-    def test_join_from_multiple_threads(self):
+    @support.bigmemtest(size=20, memuse=72*2**20, dry_run=False)
+    def test_join_from_multiple_threads(self, size):
         # Thread.join() should be thread-safe
         errors = []
 
@@ -1431,7 +1432,8 @@ class ThreadJoinOnShutdown(BaseTestCase):
         self._run_and_join(script)
 
     @unittest.skipIf(sys.platform in platforms_to_skip, "due to known OS bug")
-    def test_4_daemon_threads(self):
+    @support.bigmemtest(size=40, memuse=70*2**20, dry_run=False)
+    def test_4_daemon_threads(self, size):
         # Check that a daemon thread cannot crash the interpreter on shutdown
         # by manipulating internal structures that are being disposed of in
         # the main thread.