comparison contrib/python-zstandard/tests/test_decompressor_fuzzing.py @ 43994:de7838053207

zstandard: vendor python-zstandard 0.13.0

Version 0.13.0 of the package was just released. It contains an upgraded zstd C library, which can result in some performance wins, official support for Python 3.8, and a blackened code base.

There were no meaningful code or functionality changes in this release of python-zstandard: just reformatting and an upgraded zstd library version, so the diff appears much larger than it really is. Files were added without modifications.

The clang-format-ignorelist file was updated to reflect a new header file in the zstd distribution.

# no-check-commit because 3rd party code has different style guidelines

Differential Revision: https://phab.mercurial-scm.org/D7770
author Gregory Szorc <gregory.szorc@gmail.com>
date Sat, 28 Dec 2019 09:55:45 -0800
parents 675775c33ab6
children 5e84a96d865b
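
The file under comparison is a hypothesis-driven fuzzing suite: each test compresses sampled input with zstandard, feeds the frame back through one of the decompressor APIs, and asserts that the round trip reproduces the original bytes. The following is a minimal sketch of that round-trip property, not part of this changeset; it assumes zstandard and hypothesis are installed, and the class name, test name, and input-size cap are illustrative choices of this sketch rather than anything from the vendored suite.

import io
import unittest

import hypothesis
import hypothesis.strategies as strategies

import zstandard as zstd


class RoundTripFuzzing(unittest.TestCase):
    @hypothesis.given(
        original=strategies.binary(max_size=262144),
        level=strategies.integers(min_value=1, max_value=5),
        read_size=strategies.integers(min_value=1, max_value=8192),
    )
    def test_stream_reader_round_trip(self, original, level, read_size):
        # Compress a generated input into a single zstd frame.
        frame = zstd.ZstdCompressor(level=level).compress(original)

        # Decompress it back through the stream_reader API in small reads.
        dctx = zstd.ZstdDecompressor()
        chunks = []
        with dctx.stream_reader(io.BytesIO(frame), read_size=read_size) as reader:
            while True:
                chunk = reader.read(read_size)
                if not chunk:
                    break
                chunks.append(chunk)

        # The decompressed chunks must reassemble into the original input.
        self.assertEqual(b"".join(chunks), original)


if __name__ == "__main__":
    unittest.main()

The vendored tests below follow the same shape, but they sample inputs from random_input_data(), exercise several decompressor entry points, and gate themselves behind the ZSTD_SLOW_TESTS environment variable.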
comparison of 43993:873d0fecb9a3 with 43994:de7838053207 (changed hunks are shown below as they appear in 43994)
[... unchanged lines elided ...]

try:
    import hypothesis
    import hypothesis.strategies as strategies
except ImportError:
    raise unittest.SkipTest("hypothesis not available")

import zstandard as zstd

from .common import (
    make_cffi,
    NonClosingBytesIO,
    random_input_data,
    TestCase,
)


@unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set")
@make_cffi
class TestDecompressor_stream_reader_fuzzing(TestCase):
    @hypothesis.settings(
        suppress_health_check=[
            hypothesis.HealthCheck.large_base_example,
            hypothesis.HealthCheck.too_slow,
        ]
    )
    @hypothesis.given(
        original=strategies.sampled_from(random_input_data()),
        level=strategies.integers(min_value=1, max_value=5),
        streaming=strategies.booleans(),
        source_read_size=strategies.integers(1, 1048576),
        read_sizes=strategies.data(),
    )
    def test_stream_source_read_variance(
        self, original, level, streaming, source_read_size, read_sizes
    ):
        cctx = zstd.ZstdCompressor(level=level)

        if streaming:
            source = io.BytesIO()
            writer = cctx.stream_writer(source)
[... unchanged lines elided ...]
                if not chunk and read_size:
                    break

                chunks.append(chunk)

        self.assertEqual(b"".join(chunks), original)

    # Similar to above except we have a constant read() size.
    @hypothesis.settings(
        suppress_health_check=[hypothesis.HealthCheck.large_base_example]
    )
    @hypothesis.given(
        original=strategies.sampled_from(random_input_data()),
        level=strategies.integers(min_value=1, max_value=5),
        streaming=strategies.booleans(),
        source_read_size=strategies.integers(1, 1048576),
        read_size=strategies.integers(-1, 131072),
    )
    def test_stream_source_read_size(
        self, original, level, streaming, source_read_size, read_size
    ):
        if read_size == 0:
            read_size = 1

        cctx = zstd.ZstdCompressor(level=level)

[... unchanged lines elided ...]
                if not chunk and read_size:
                    break

                chunks.append(chunk)

        self.assertEqual(b"".join(chunks), original)

    @hypothesis.settings(
        suppress_health_check=[
            hypothesis.HealthCheck.large_base_example,
            hypothesis.HealthCheck.too_slow,
        ]
    )
    @hypothesis.given(
        original=strategies.sampled_from(random_input_data()),
        level=strategies.integers(min_value=1, max_value=5),
        streaming=strategies.booleans(),
        source_read_size=strategies.integers(1, 1048576),
        read_sizes=strategies.data(),
    )
    def test_buffer_source_read_variance(
        self, original, level, streaming, source_read_size, read_sizes
    ):
        cctx = zstd.ZstdCompressor(level=level)

        if streaming:
            source = io.BytesIO()
            writer = cctx.stream_writer(source)
[... unchanged lines elided ...]
                if not chunk and read_size:
                    break

                chunks.append(chunk)

        self.assertEqual(b"".join(chunks), original)

    # Similar to above except we have a constant read() size.
    @hypothesis.settings(
        suppress_health_check=[hypothesis.HealthCheck.large_base_example]
    )
    @hypothesis.given(
        original=strategies.sampled_from(random_input_data()),
        level=strategies.integers(min_value=1, max_value=5),
        streaming=strategies.booleans(),
        source_read_size=strategies.integers(1, 1048576),
        read_size=strategies.integers(-1, 131072),
    )
    def test_buffer_source_constant_read_size(
        self, original, level, streaming, source_read_size, read_size
    ):
        if read_size == 0:
            read_size = -1

        cctx = zstd.ZstdCompressor(level=level)

[... unchanged lines elided ...]
                if not chunk and read_size:
                    break

                chunks.append(chunk)

        self.assertEqual(b"".join(chunks), original)

    @hypothesis.settings(
        suppress_health_check=[hypothesis.HealthCheck.large_base_example]
    )
    @hypothesis.given(
        original=strategies.sampled_from(random_input_data()),
        level=strategies.integers(min_value=1, max_value=5),
        streaming=strategies.booleans(),
        source_read_size=strategies.integers(1, 1048576),
    )
    def test_stream_source_readall(self, original, level, streaming, source_read_size):
        cctx = zstd.ZstdCompressor(level=level)

        if streaming:
            source = io.BytesIO()
            writer = cctx.stream_writer(source)
[... unchanged lines elided ...]

        data = dctx.stream_reader(source, read_size=source_read_size).readall()
        self.assertEqual(data, original)

    @hypothesis.settings(
        suppress_health_check=[
            hypothesis.HealthCheck.large_base_example,
            hypothesis.HealthCheck.too_slow,
        ]
    )
    @hypothesis.given(
        original=strategies.sampled_from(random_input_data()),
        level=strategies.integers(min_value=1, max_value=5),
        streaming=strategies.booleans(),
        source_read_size=strategies.integers(1, 1048576),
        read_sizes=strategies.data(),
    )
    def test_stream_source_read1_variance(
        self, original, level, streaming, source_read_size, read_sizes
    ):
        cctx = zstd.ZstdCompressor(level=level)

        if streaming:
            source = io.BytesIO()
            writer = cctx.stream_writer(source)
[... unchanged lines elided ...]
                if not chunk and read_size:
                    break

                chunks.append(chunk)

        self.assertEqual(b"".join(chunks), original)

    @hypothesis.settings(
        suppress_health_check=[
            hypothesis.HealthCheck.large_base_example,
            hypothesis.HealthCheck.too_slow,
        ]
    )
    @hypothesis.given(
        original=strategies.sampled_from(random_input_data()),
        level=strategies.integers(min_value=1, max_value=5),
        streaming=strategies.booleans(),
        source_read_size=strategies.integers(1, 1048576),
        read_sizes=strategies.data(),
    )
    def test_stream_source_readinto1_variance(
        self, original, level, streaming, source_read_size, read_sizes
    ):
        cctx = zstd.ZstdCompressor(level=level)

        if streaming:
            source = io.BytesIO()
            writer = cctx.stream_writer(source)
[... unchanged lines elided ...]
                if not count:
                    break

                chunks.append(bytes(b[0:count]))

        self.assertEqual(b"".join(chunks), original)

    @hypothesis.settings(
        suppress_health_check=[
            hypothesis.HealthCheck.large_base_example,
            hypothesis.HealthCheck.too_slow,
        ]
    )
    @hypothesis.given(
        original=strategies.sampled_from(random_input_data()),
        level=strategies.integers(min_value=1, max_value=5),
        source_read_size=strategies.integers(1, 1048576),
        seek_amounts=strategies.data(),
        read_sizes=strategies.data(),
    )
    def test_relative_seeks(
        self, original, level, source_read_size, seek_amounts, read_sizes
    ):
        cctx = zstd.ZstdCompressor(level=level)
        frame = cctx.compress(original)

        dctx = zstd.ZstdDecompressor()

[... unchanged lines elided ...]
                chunk = reader.read(read_amount)

                if not chunk:
                    break

                self.assertEqual(original[offset : offset + len(chunk)], chunk)

    @hypothesis.settings(
        suppress_health_check=[
            hypothesis.HealthCheck.large_base_example,
            hypothesis.HealthCheck.too_slow,
        ]
    )
    @hypothesis.given(
        originals=strategies.data(),
        frame_count=strategies.integers(min_value=2, max_value=10),
        level=strategies.integers(min_value=1, max_value=5),
        source_read_size=strategies.integers(1, 1048576),
        read_sizes=strategies.data(),
    )
    def test_multiple_frames(
        self, originals, frame_count, level, source_read_size, read_sizes
    ):

        cctx = zstd.ZstdCompressor(level=level)
        source = io.BytesIO()
        buffer = io.BytesIO()
        writer = cctx.stream_writer(buffer)
[... unchanged lines elided ...]
            writer.write(data)
            writer.flush(zstd.FLUSH_FRAME)

        dctx = zstd.ZstdDecompressor()
        buffer.seek(0)
        reader = dctx.stream_reader(
            buffer, read_size=source_read_size, read_across_frames=True
        )

        chunks = []

        while True:
            read_amount = read_sizes.draw(strategies.integers(-1, 16384))
[... unchanged lines elided ...]
            if not chunk and read_amount:
                break

            chunks.append(chunk)

        self.assertEqual(source.getvalue(), b"".join(chunks))


@unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set")
@make_cffi
class TestDecompressor_stream_writer_fuzzing(TestCase):
    @hypothesis.settings(
        suppress_health_check=[
            hypothesis.HealthCheck.large_base_example,
            hypothesis.HealthCheck.too_slow,
        ]
    )
    @hypothesis.given(
        original=strategies.sampled_from(random_input_data()),
        level=strategies.integers(min_value=1, max_value=5),
        write_size=strategies.integers(min_value=1, max_value=8192),
        input_sizes=strategies.data(),
    )
    def test_write_size_variance(self, original, level, write_size, input_sizes):
        cctx = zstd.ZstdCompressor(level=level)
        frame = cctx.compress(original)

        dctx = zstd.ZstdDecompressor()
[... unchanged lines elided ...]
                decompressor.write(chunk)

        self.assertEqual(dest.getvalue(), original)


@unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set")
@make_cffi
class TestDecompressor_copy_stream_fuzzing(TestCase):
    @hypothesis.settings(
        suppress_health_check=[
            hypothesis.HealthCheck.large_base_example,
            hypothesis.HealthCheck.too_slow,
        ]
    )
    @hypothesis.given(
        original=strategies.sampled_from(random_input_data()),
        level=strategies.integers(min_value=1, max_value=5),
        read_size=strategies.integers(min_value=1, max_value=8192),
        write_size=strategies.integers(min_value=1, max_value=8192),
    )
    def test_read_write_size_variance(self, original, level, read_size, write_size):
        cctx = zstd.ZstdCompressor(level=level)
        frame = cctx.compress(original)

        source = io.BytesIO(frame)
[... unchanged lines elided ...]
        dctx.copy_stream(source, dest, read_size=read_size, write_size=write_size)

        self.assertEqual(dest.getvalue(), original)


@unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set")
@make_cffi
class TestDecompressor_decompressobj_fuzzing(TestCase):
    @hypothesis.settings(
        suppress_health_check=[
            hypothesis.HealthCheck.large_base_example,
            hypothesis.HealthCheck.too_slow,
        ]
    )
    @hypothesis.given(
        original=strategies.sampled_from(random_input_data()),
        level=strategies.integers(min_value=1, max_value=5),
        chunk_sizes=strategies.data(),
    )
    def test_random_input_sizes(self, original, level, chunk_sizes):
        cctx = zstd.ZstdCompressor(level=level)
        frame = cctx.compress(original)

        source = io.BytesIO(frame)
[... unchanged lines elided ...]
            if not chunk:
                break

            chunks.append(dobj.decompress(chunk))

        self.assertEqual(b"".join(chunks), original)

    @hypothesis.settings(
        suppress_health_check=[
            hypothesis.HealthCheck.large_base_example,
            hypothesis.HealthCheck.too_slow,
        ]
    )
    @hypothesis.given(
        original=strategies.sampled_from(random_input_data()),
        level=strategies.integers(min_value=1, max_value=5),
        write_size=strategies.integers(
            min_value=1, max_value=4 * zstd.DECOMPRESSION_RECOMMENDED_OUTPUT_SIZE
        ),
        chunk_sizes=strategies.data(),
    )
    def test_random_output_sizes(self, original, level, write_size, chunk_sizes):
        cctx = zstd.ZstdCompressor(level=level)
        frame = cctx.compress(original)

        source = io.BytesIO(frame)
[... unchanged lines elided ...]
            if not chunk:
                break

            chunks.append(dobj.decompress(chunk))

        self.assertEqual(b"".join(chunks), original)


@unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set")
@make_cffi
class TestDecompressor_read_to_iter_fuzzing(TestCase):
    @hypothesis.given(
        original=strategies.sampled_from(random_input_data()),
        level=strategies.integers(min_value=1, max_value=5),
        read_size=strategies.integers(min_value=1, max_value=4096),
        write_size=strategies.integers(min_value=1, max_value=4096),
    )
    def test_read_write_size_variance(self, original, level, read_size, write_size):
        cctx = zstd.ZstdCompressor(level=level)
        frame = cctx.compress(original)

        source = io.BytesIO(frame)

        dctx = zstd.ZstdDecompressor()
        chunks = list(
            dctx.read_to_iter(source, read_size=read_size, write_size=write_size)
        )

        self.assertEqual(b"".join(chunks), original)


@unittest.skipUnless("ZSTD_SLOW_TESTS" in os.environ, "ZSTD_SLOW_TESTS not set")
class TestDecompressor_multi_decompress_to_buffer_fuzzing(TestCase):
    @hypothesis.given(
        original=strategies.lists(
            strategies.sampled_from(random_input_data()), min_size=1, max_size=1024
        ),
        threads=strategies.integers(min_value=1, max_value=8),
        use_dict=strategies.booleans(),
    )
    def test_data_equivalence(self, original, threads, use_dict):
        kwargs = {}
        if use_dict:
            kwargs["dict_data"] = zstd.ZstdCompressionDict(original[0])

        cctx = zstd.ZstdCompressor(
            level=1, write_content_size=True, write_checksum=True, **kwargs
        )

        if not hasattr(cctx, "multi_compress_to_buffer"):
            self.skipTest("multi_compress_to_buffer not available")

        frames_buffer = cctx.multi_compress_to_buffer(original, threads=-1)

        dctx = zstd.ZstdDecompressor(**kwargs)
        result = dctx.multi_decompress_to_buffer(frames_buffer)