gh-108418: Speed up bigmem compression tests in dry mode (GH-108419)
Only generate and compress a small amount of random data in a dry run.
(cherry picked from commit 4ae3edf3008b70e20663143553a736d80ff3a501)
Co-authored-by: Serhiy Storchaka <storchaka@gmail.com>
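
The speedup comes from the new sizing arithmetic: blocksize is capped at size,
and the ceiling division produces only as many copies of the random block as
are needed to reach size bytes. In a dry run (no memory limit set), @bigmemtest
substitutes a small size, so the tests now generate and compress only a few KiB
instead of a full 10 MiB block. A minimal sketch of the arithmetic, using a
hypothetical helper data_length that is not part of the patch:

    def data_length(size, max_block=10 * 1024 * 1024):
        # Mirror the patched logic: cap the random block at `size`, then
        # repeat it just enough times (ceiling division) to cover `size` bytes.
        blocksize = min(max_block, size)
        return blocksize, blocksize * ((size - 1) // blocksize + 1)

    for size in (4 * 1024 ** 3 + 100,  # real bigmem run: just over 4 GiB
                 5147):                # dry run: small substitute size (illustrative value)
        blocksize, total = data_length(size)
        assert total >= size           # always enough data to round-trip
        print(size, blocksize, total)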
     @bigmemtest(size=_4G + 100, memuse=3.3)
     def testDecompress4G(self, size):
         # "Test BZ2Decompressor.decompress() with >4GiB input"
-        blocksize = 10 * 1024 * 1024
+        blocksize = min(10 * 1024 * 1024, size)
         block = random.randbytes(blocksize)
         try:
-            data = block * (size // blocksize + 1)
+            data = block * ((size-1) // blocksize + 1)
             compressed = bz2.compress(data)
             bz2d = BZ2Decompressor()
             decompressed = bz2d.decompress(compressed)

     @bigmemtest(size=_4G + 100, memuse=3)
     def test_decompressor_bigmem(self, size):
         lzd = LZMADecompressor()
-        blocksize = 10 * 1024 * 1024
+        blocksize = min(10 * 1024 * 1024, size)
         block = random.randbytes(blocksize)
         try:
-            input = block * (size // blocksize + 1)
+            input = block * ((size-1) // blocksize + 1)
             cdata = lzma.compress(input)
             ddata = lzd.decompress(cdata)
             self.assertEqual(ddata, input)

     @bigmemtest(size=_4G + 100, memuse=3.3)
     def testDecompress4G(self, size):
         # "Test zlib._ZlibDecompressor.decompress() with >4GiB input"
-        blocksize = 10 * 1024 * 1024
+        blocksize = min(10 * 1024 * 1024, size)
         block = random.randbytes(blocksize)
         try:
-            data = block * (size // blocksize + 1)
+            data = block * ((size-1) // blocksize + 1)
             compressed = zlib.compress(data)
             zlibd = zlib._ZlibDecompressor()
             decompressed = zlibd.decompress(compressed)
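
Usage note (an assumption about the standard regrtest workflow, not part of the
patch): without regrtest's -M/--memlimit option these tests take the dry-run
path that this change speeds up; the full >4 GiB path only runs when enough
memory is allowed, for example:

    ./python -m test -M 16G test_bz2 test_lzma test_zlib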