From 28def3611d4fa9749e800d6740f2c8e7d8dac1d8 Mon Sep 17 00:00:00 2001
From: Serhiy Storchaka
Date: Mon, 5 May 2025 19:47:37 +0300
Subject: [PATCH] gh-133454: Mark tests with many threads that use much memory
 as bigmem

---
 Lib/test/test_asyncio/test_ssl.py               | 10 ++++++----
 Lib/test/test_importlib/test_threaded_import.py | 15 ++++++++++-----
 Lib/test/test_threadedtempfile.py               |  4 +++-
 Lib/test/test_threading.py                      |  6 ++++--
 4 files changed, 23 insertions(+), 12 deletions(-)

diff --git a/Lib/test/test_asyncio/test_ssl.py b/Lib/test/test_asyncio/test_ssl.py
index 986ecc2c5a964b..3a7185cd8974d0 100644
--- a/Lib/test/test_asyncio/test_ssl.py
+++ b/Lib/test/test_asyncio/test_ssl.py
@@ -195,9 +195,10 @@ async def wait_closed(self, obj):
         except (BrokenPipeError, ConnectionError):
             pass
 
-    def test_create_server_ssl_1(self):
+    @support.bigmemtest(size=25, memuse=90*2**20, dry_run=False)
+    def test_create_server_ssl_1(self, size):
         CNT = 0           # number of clients that were successful
-        TOTAL_CNT = 25    # total number of clients that test will create
+        TOTAL_CNT = size  # total number of clients that test will create
         TIMEOUT = support.LONG_TIMEOUT  # timeout for this test
 
         A_DATA = b'A' * 1024 * BUF_MULTIPLIER
@@ -1038,9 +1039,10 @@ async def run_main():
 
         self.loop.run_until_complete(run_main())
 
-    def test_create_server_ssl_over_ssl(self):
+    @support.bigmemtest(size=25, memuse=90*2**20, dry_run=False)
+    def test_create_server_ssl_over_ssl(self, size):
         CNT = 0           # number of clients that were successful
-        TOTAL_CNT = 25    # total number of clients that test will create
+        TOTAL_CNT = size  # total number of clients that test will create
         TIMEOUT = support.LONG_TIMEOUT  # timeout for this test
 
         A_DATA = b'A' * 1024 * BUF_MULTIPLIER
diff --git a/Lib/test/test_importlib/test_threaded_import.py b/Lib/test/test_importlib/test_threaded_import.py
index 9af1e4d505c66e..f78dc399720c86 100644
--- a/Lib/test/test_importlib/test_threaded_import.py
+++ b/Lib/test/test_importlib/test_threaded_import.py
@@ -135,10 +135,12 @@ def check_parallel_module_init(self, mock_os):
         if verbose:
             print("OK.")
 
-    def test_parallel_module_init(self):
+    @support.bigmemtest(size=50, memuse=76*2**20, dry_run=False)
+    def test_parallel_module_init(self, size):
         self.check_parallel_module_init()
 
-    def test_parallel_meta_path(self):
+    @support.bigmemtest(size=50, memuse=76*2**20, dry_run=False)
+    def test_parallel_meta_path(self, size):
         finder = Finder()
         sys.meta_path.insert(0, finder)
         try:
@@ -148,7 +150,8 @@ def test_parallel_meta_path(self):
         finally:
             sys.meta_path.remove(finder)
 
-    def test_parallel_path_hooks(self):
+    @support.bigmemtest(size=50, memuse=76*2**20, dry_run=False)
+    def test_parallel_path_hooks(self, size):
         # Here the Finder instance is only used to check concurrent calls
         # to path_hook().
         finder = Finder()
@@ -242,13 +245,15 @@ def target():
             __import__(TESTFN)
             del sys.modules[TESTFN]
 
-    def test_concurrent_futures_circular_import(self):
+    @support.bigmemtest(size=1, memuse=1.8*2**30, dry_run=False)
+    def test_concurrent_futures_circular_import(self, size):
         # Regression test for bpo-43515
         fn = os.path.join(os.path.dirname(__file__), 'partial',
                           'cfimport.py')
         script_helper.assert_python_ok(fn)
 
-    def test_multiprocessing_pool_circular_import(self):
+    @support.bigmemtest(size=1, memuse=1.8*2**30, dry_run=False)
+    def test_multiprocessing_pool_circular_import(self, size):
         # Regression test for bpo-41567
         fn = os.path.join(os.path.dirname(__file__), 'partial',
                           'pool_in_threads.py')
diff --git a/Lib/test/test_threadedtempfile.py b/Lib/test/test_threadedtempfile.py
index 420fc6ec8be3d8..acb427b0c78ae9 100644
--- a/Lib/test/test_threadedtempfile.py
+++ b/Lib/test/test_threadedtempfile.py
@@ -15,6 +15,7 @@
 
 import tempfile
 
+from test import support
 from test.support import threading_helper
 import unittest
 import io
@@ -49,7 +50,8 @@ def run(self):
 
 
 class ThreadedTempFileTest(unittest.TestCase):
-    def test_main(self):
+    @support.bigmemtest(size=NUM_THREADS, memuse=60*2**20, dry_run=False)
+    def test_main(self, size):
         threads = [TempFileGreedy() for i in range(NUM_THREADS)]
         with threading_helper.start_threads(threads, startEvent.set):
             pass
diff --git a/Lib/test/test_threading.py b/Lib/test/test_threading.py
index 814c00ca0fd7b6..b31418b23565a5 100644
--- a/Lib/test/test_threading.py
+++ b/Lib/test/test_threading.py
@@ -526,7 +526,8 @@ def test_enumerate_after_join(self):
         finally:
             sys.setswitchinterval(old_interval)
 
-    def test_join_from_multiple_threads(self):
+    @support.bigmemtest(size=20, memuse=72*2**20, dry_run=False)
+    def test_join_from_multiple_threads(self, size):
         # Thread.join() should be thread-safe
         errors = []
 
@@ -1427,7 +1428,8 @@ def worker():
         self._run_and_join(script)
 
     @unittest.skipIf(sys.platform in platforms_to_skip, "due to known OS bug")
-    def test_4_daemon_threads(self):
+    @support.bigmemtest(size=40, memuse=70*2**20, dry_run=False)
+    def test_4_daemon_threads(self, size):
         # Check that a daemon thread cannot crash the interpreter on shutdown
         # by manipulating internal structures that are being disposed of in
         # the main thread.
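
For reference, a minimal sketch of the pattern this patch applies to each test. The test module, class name, thread count, and memory figures below are illustrative and not taken from the patch. test.support.bigmemtest skips the decorated test unless the memory budget set via regrtest's -M/--memlimit option covers roughly size * memuse bytes (with dry_run=False the test is skipped rather than run at a reduced size), and it forwards the size value to the test method, which the patch then uses as the thread or client count.

# Illustrative sketch only -- not part of the patch.  Shows how a test that
# starts many threads can be gated with support.bigmemtest so it only runs
# when an explicit memory limit (e.g. "python -m test -M 8G ...") allows it.
import threading
import unittest
from test import support


class ManyThreadsExample(unittest.TestCase):

    # Ask for 20 units of ~64 MiB each; with dry_run=False the test is
    # skipped unless the -M budget covers it.  The size value (20) is
    # passed in and used here as the number of worker threads.
    @support.bigmemtest(size=20, memuse=64 * 2**20, dry_run=False)
    def test_spawn_many_threads(self, size):
        results = []
        threads = [threading.Thread(target=results.append, args=(i,))
                   for i in range(size)]
        for t in threads:
            t.start()
        for t in threads:
            t.join()
        self.assertEqual(len(results), size)


if __name__ == "__main__":
    unittest.main()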