git.ipfire.org Git - thirdparty/Python/cpython.git/commitdiff
[3.13] gh-133454: Mark tests with many threads that use much memory as bigmem (GH...
author: Miss Islington (bot) <31488909+miss-islington@users.noreply.github.com>
Thu, 8 May 2025 13:20:34 +0000 (15:20 +0200)
committer: GitHub <noreply@github.com>
Thu, 8 May 2025 13:20:34 +0000 (13:20 +0000)
(cherry picked from commit 26839eae2079bd797491dfdcc00eaca1dbcbf77c)

Co-authored-by: Serhiy Storchaka <storchaka@gmail.com>
Lib/test/test_asyncio/test_ssl.py
Lib/test/test_importlib/test_threaded_import.py
Lib/test/test_threadedtempfile.py
Lib/test/test_threading.py

index e4ab5a9024c95679fa25340938b8b578468447f1..c5667a3db9bc2bcdd516bc1384fdeaf4327cdfa4 100644 (file)
@@ -195,9 +195,10 @@ class TestSSL(test_utils.TestCase):
         except (BrokenPipeError, ConnectionError):
             pass
 
-    def test_create_server_ssl_1(self):
+    @support.bigmemtest(size=25, memuse=90*2**20, dry_run=False)
+    def test_create_server_ssl_1(self, size):
         CNT = 0           # number of clients that were successful
-        TOTAL_CNT = 25    # total number of clients that test will create
+        TOTAL_CNT = size  # total number of clients that test will create
         TIMEOUT = support.LONG_TIMEOUT  # timeout for this test
 
         A_DATA = b'A' * 1024 * BUF_MULTIPLIER
@@ -1038,9 +1039,10 @@ class TestSSL(test_utils.TestCase):
 
         self.loop.run_until_complete(run_main())
 
-    def test_create_server_ssl_over_ssl(self):
+    @support.bigmemtest(size=25, memuse=90*2**20, dry_run=False)
+    def test_create_server_ssl_over_ssl(self, size):
         CNT = 0           # number of clients that were successful
-        TOTAL_CNT = 25    # total number of clients that test will create
+        TOTAL_CNT = size  # total number of clients that test will create
         TIMEOUT = support.LONG_TIMEOUT  # timeout for this test
 
         A_DATA = b'A' * 1024 * BUF_MULTIPLIER
index 9af1e4d505c66e02435e1b2e300674c42cf5e2ab..f78dc399720c8674cb44cfabe1b46232ff0eb8a2 100644 (file)
@@ -135,10 +135,12 @@ class ThreadedImportTests(unittest.TestCase):
             if verbose:
                 print("OK.")
 
-    def test_parallel_module_init(self):
+    @support.bigmemtest(size=50, memuse=76*2**20, dry_run=False)
+    def test_parallel_module_init(self, size):
         self.check_parallel_module_init()
 
-    def test_parallel_meta_path(self):
+    @support.bigmemtest(size=50, memuse=76*2**20, dry_run=False)
+    def test_parallel_meta_path(self, size):
         finder = Finder()
         sys.meta_path.insert(0, finder)
         try:
@@ -148,7 +150,8 @@ class ThreadedImportTests(unittest.TestCase):
         finally:
             sys.meta_path.remove(finder)
 
-    def test_parallel_path_hooks(self):
+    @support.bigmemtest(size=50, memuse=76*2**20, dry_run=False)
+    def test_parallel_path_hooks(self, size):
         # Here the Finder instance is only used to check concurrent calls
         # to path_hook().
         finder = Finder()
@@ -242,13 +245,15 @@ class ThreadedImportTests(unittest.TestCase):
             __import__(TESTFN)
         del sys.modules[TESTFN]
 
-    def test_concurrent_futures_circular_import(self):
+    @support.bigmemtest(size=1, memuse=1.8*2**30, dry_run=False)
+    def test_concurrent_futures_circular_import(self, size):
         # Regression test for bpo-43515
         fn = os.path.join(os.path.dirname(__file__),
                           'partial', 'cfimport.py')
         script_helper.assert_python_ok(fn)
 
-    def test_multiprocessing_pool_circular_import(self):
+    @support.bigmemtest(size=1, memuse=1.8*2**30, dry_run=False)
+    def test_multiprocessing_pool_circular_import(self, size):
         # Regression test for bpo-41567
         fn = os.path.join(os.path.dirname(__file__),
                           'partial', 'pool_in_threads.py')
index 420fc6ec8be3d8d9e7f6b85616fde730da445b24..acb427b0c78ae944a7625f4e06e2f85c38a79b4d 100644 (file)
@@ -15,6 +15,7 @@ provoking a 2.0 failure under Linux.
 
 import tempfile
 
+from test import support
 from test.support import threading_helper
 import unittest
 import io
@@ -49,7 +50,8 @@ class TempFileGreedy(threading.Thread):
 
 
 class ThreadedTempFileTest(unittest.TestCase):
-    def test_main(self):
+    @support.bigmemtest(size=NUM_THREADS, memuse=60*2**20, dry_run=False)
+    def test_main(self, size):
         threads = [TempFileGreedy() for i in range(NUM_THREADS)]
         with threading_helper.start_threads(threads, startEvent.set):
             pass
index c4cf3e6a14a61c00210368721ca0f2844325c4de..51e3f4a66510c91f0e3c60fa4af816a49252a59d 100644 (file)
@@ -526,7 +526,8 @@ class ThreadTests(BaseTestCase):
         finally:
             sys.setswitchinterval(old_interval)
 
-    def test_join_from_multiple_threads(self):
+    @support.bigmemtest(size=20, memuse=72*2**20, dry_run=False)
+    def test_join_from_multiple_threads(self, size):
         # Thread.join() should be thread-safe
         errors = []
 
@@ -1287,7 +1288,8 @@ class ThreadJoinOnShutdown(BaseTestCase):
         self._run_and_join(script)
 
     @unittest.skipIf(sys.platform in platforms_to_skip, "due to known OS bug")
-    def test_4_daemon_threads(self):
+    @support.bigmemtest(size=40, memuse=70*2**20, dry_run=False)
+    def test_4_daemon_threads(self, size):
         # Check that a daemon thread cannot crash the interpreter on shutdown
         # by manipulating internal structures that are being disposed of in
         # the main thread.