def testDecompress4G(self, size):
# "Test BZ2Decompressor.decompress() with >4GiB input"
blocksize = 10 * 1024 * 1024
- block = random.getrandbits(blocksize * 8).to_bytes(blocksize, 'little')
+ block = random.randbytes(blocksize)
try:
data = block * (size // blocksize + 1)
compressed = bz2.compress(data)
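The idiom being replaced throughout this patch is getrandbits(n * 8).to_bytes(n, 'little'); random.randbytes(n), added in Python 3.9, is the direct shorthand. A small sanity sketch, not taken from the patch, assuming CPython's pure-Python Random.randbytes():

import random

# With the same seed, the new call returns the same bytes as the old idiom,
# since CPython implements Random.randbytes(n) as
# getrandbits(n * 8).to_bytes(n, 'little').
gen = random.Random(1234)
old_style = gen.getrandbits(16 * 8).to_bytes(16, 'little')
gen.seed(1234)
assert gen.randbytes(16) == old_style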
def test_decompressor_bigmem(self, size):
lzd = LZMADecompressor()
blocksize = 10 * 1024 * 1024
- block = random.getrandbits(blocksize * 8).to_bytes(blocksize, "little")
+ block = random.randbytes(blocksize)
try:
input = block * (size // blocksize + 1)
cdata = lzma.compress(input)
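A scaled-down sketch of the same compress/decompress round trip outside the bigmem harness; the 10 KiB size is illustrative, not the test's:

import lzma
from random import randbytes

# 10 KiB stands in for the >4 GiB bigmem input; the round trip is the same.
block = randbytes(10 * 1024)
lzd = lzma.LZMADecompressor()
assert lzd.decompress(lzma.compress(block)) == block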
def test_ignore_zeros(self):
# Test TarFile's ignore_zeros option.
# generate 512 pseudorandom bytes
- data = Random(0).getrandbits(512*8).to_bytes(512, 'big')
+ data = Random(0).randbytes(512)
for char in (b'\0', b'a'):
# Test if EOFHeaderError ('\0') and InvalidHeaderError ('a')
# are ignored correctly.
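A hypothetical, self-contained sketch of what ignore_zeros does with such junk blocks; the member name and payload are made up, only the option's behaviour is the point:

import io
import tarfile

# Build a one-member archive, then prepend a 512-byte garbage block.
buf = io.BytesIO()
with tarfile.open(fileobj=buf, mode='w') as tar:
    payload = b'hello'
    info = tarfile.TarInfo('member.txt')
    info.size = len(payload)
    tar.addfile(info, io.BytesIO(payload))
data = b'\0' * 512 + buf.getvalue()

# With ignore_zeros=True the bogus leading block is skipped instead of
# raising a header error, and the real member is still found.
with tarfile.open(fileobj=io.BytesIO(data), mode='r', ignore_zeros=True) as tar:
    assert tar.getnames() == ['member.txt']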
from tempfile import TemporaryFile
-from random import randint, random, getrandbits
+from random import randint, random, randbytes
from test.support import script_helper
from test.support import (TESTFN, findfile, unlink, rmtree, temp_dir, temp_cwd,
('ziptest2dir/ziptest3dir/_ziptest3', 'azsxdcfvgb'),
('ziptest2dir/ziptest3dir/ziptest4dir/_ziptest3', '6y7u8i9o0p')]
-def getrandbytes(size):
- return getrandbits(8 * size).to_bytes(size, 'little')
-
def get_files(test):
yield TESTFN2
with TemporaryFile() as f:
# ZipExtFile.read() shouldn't return more bytes
# than requested.
for test_size in (1, 4095, 4096, 4097, 16384):
file_size = test_size + 1
- junk = getrandbytes(file_size)
+ junk = randbytes(file_size)
with zipfile.ZipFile(io.BytesIO(), "w", self.compression) as zipf:
zipf.writestr('foo', junk)
with zipf.open('foo', 'r') as fp:
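A self-contained sketch of the writestr()/open() round trip above; ZIP_STORED and the sizes are illustrative stand-ins for the test's parameters:

import io
import zipfile
from random import randbytes

# Write random junk, then read back one byte less than the file size;
# ZipExtFile.read(n) should return exactly n bytes here, not more.
junk = randbytes(4097)
with zipfile.ZipFile(io.BytesIO(), "w", zipfile.ZIP_STORED) as zipf:
    zipf.writestr('foo', junk)
    with zipf.open('foo', 'r') as fp:
        assert fp.read(4096) == junk[:4096]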
class TestsWithMultipleOpens(unittest.TestCase):
@classmethod
def setUpClass(cls):
- cls.data1 = b'111' + getrandbytes(10000)
- cls.data2 = b'222' + getrandbytes(10000)
+ cls.data1 = b'111' + randbytes(10000)
+ cls.data2 = b'222' + randbytes(10000)
def make_test_archive(self, f):
# Create the ZIP archive
# Generate 10 MiB worth of random, and expand it by repeating it.
# The assumption is that zlib's memory is not big enough to exploit
# such spread out redundancy.
- data = b''.join([random.getrandbits(8 * _1M).to_bytes(_1M, 'little')
- for i in range(10)])
+ data = random.randbytes(_1M * 10)
data = data * (size // len(data) + 1)
try:
compress_func(data)
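The comment's assumption can be checked on a small scale: DEFLATE's window is at most 32 KiB, so redundancy with a multi-megabyte period is invisible to zlib, while a short period compresses well. A rough illustration with arbitrary block sizes:

import random
import zlib

# A 1 KiB period fits inside the 32 KiB window and compresses away;
# a 10 MiB period does not, so the data stays roughly incompressible.
short_period = random.randbytes(1024) * (10 * 1024)       # 10 MiB total
long_period = random.randbytes(10 * 1024 * 1024) * 2      # 20 MiB total
print(len(zlib.compress(short_period)) / len(short_period))   # tiny
print(len(zlib.compress(long_period)) / len(long_period))     # close to 1.0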
# others might simply have a single RNG
gen = random
gen.seed(1)
- data = genblock(1, 17 * 1024, generator=gen)
+ data = gen.randbytes(17 * 1024)
# compress, sync-flush, and decompress
first = co.compress(data)
self.assertEqual(dco.decompress(gzip), HAMLET_SCENE)
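A minimal stand-alone version of the compress / sync-flush / decompress sequence above; the seed and 17 KiB size mirror the excerpt, the rest is boilerplate:

import random
import zlib

# Seeded random payload, compressed with a sync flush, then decompressed.
gen = random.Random()
gen.seed(1)
data = gen.randbytes(17 * 1024)
co = zlib.compressobj(zlib.Z_BEST_COMPRESSION)
dco = zlib.decompressobj()
first = co.compress(data)
second = co.flush(zlib.Z_SYNC_FLUSH)
assert dco.decompress(first + second) == data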
-def genblock(seed, length, step=1024, generator=random):
- """length-byte stream of random data from a seed (in step-byte blocks)."""
- if seed is not None:
- generator.seed(seed)
- randint = generator.randint
- if length < step or step < 2:
- step = length
- blocks = bytes()
- for i in range(0, length, step):
- blocks += bytes(randint(0, 255) for x in range(step))
- return blocks
-
-
-
def choose_lines(source, number, seed=None, generator=random):
"""Return a list of number lines randomly chosen from the source"""
if seed is not None:
return [generator.choice(sources) for n in range(number)]
-
HAMLET_SCENE = b"""
LAERTES