How to use decompress method in wpt

Best JavaScript code snippet using wpt

test_zlib.py

Source:test_zlib.py Github

copy

Full Screen

...95 self.assertRaises(ValueError, zlib.decompressobj().flush, -1)96 @support.cpython_only97 def test_overflow(self):98 with self.assertRaisesRegex(OverflowError, 'int too large'):99 zlib.decompress(b'', 15, sys.maxsize + 1)100 with self.assertRaisesRegex(OverflowError, 'int too large'):101 zlib.decompressobj().decompress(b'', sys.maxsize + 1)102 with self.assertRaisesRegex(OverflowError, 'int too large'):103 zlib.decompressobj().flush(sys.maxsize + 1)104class BaseCompressTestCase(object):105 def check_big_compress_buffer(self, size, compress_func):106 _1M = 1024 * 1024107 # Generate 10 MiB worth of random, and expand it by repeating it.108 # The assumption is that zlib's memory is not big enough to exploit109 # such spread out redundancy.110 data = b''.join([random.getrandbits(8 * _1M).to_bytes(_1M, 'little')111 for i in range(10)])112 data = data * (size // len(data) + 1)113 try:114 compress_func(data)115 finally:116 # Release memory117 data = None118 def check_big_decompress_buffer(self, size, decompress_func):119 data = b'x' * size120 try:121 compressed = zlib.compress(data, 1)122 finally:123 # Release memory124 data = None125 data = decompress_func(compressed)126 # Sanity check127 try:128 self.assertEqual(len(data), size)129 self.assertEqual(len(data.strip(b'x')), 0)130 finally:131 data = None132class CompressTestCase(BaseCompressTestCase, unittest.TestCase):133 # Test compression in one go (whole message compression)134 def test_speech(self):135 x = zlib.compress(HAMLET_SCENE)136 self.assertEqual(zlib.decompress(x), HAMLET_SCENE)137 def test_keywords(self):138 x = zlib.compress(HAMLET_SCENE, level=3)139 self.assertEqual(zlib.decompress(x), HAMLET_SCENE)140 with self.assertRaises(TypeError):141 zlib.compress(data=HAMLET_SCENE, level=3)142 self.assertEqual(zlib.decompress(x,143 wbits=zlib.MAX_WBITS,144 bufsize=zlib.DEF_BUF_SIZE),145 HAMLET_SCENE)146 def test_speech128(self):147 # compress more data148 data = HAMLET_SCENE * 128149 x = zlib.compress(data)150 
self.assertEqual(zlib.compress(bytearray(data)), x)151 for ob in x, bytearray(x):152 self.assertEqual(zlib.decompress(ob), data)153 def test_incomplete_stream(self):154 # A useful error message is given155 x = zlib.compress(HAMLET_SCENE)156 self.assertRaisesRegex(zlib.error,157 "Error -5 while decompressing data: incomplete or truncated stream",158 zlib.decompress, x[:-1])159 # Memory use of the following functions takes into account overallocation160 @bigmemtest(size=_1G + 1024 * 1024, memuse=3)161 def test_big_compress_buffer(self, size):162 compress = lambda s: zlib.compress(s, 1)163 self.check_big_compress_buffer(size, compress)164 @bigmemtest(size=_1G + 1024 * 1024, memuse=2)165 def test_big_decompress_buffer(self, size):166 self.check_big_decompress_buffer(size, zlib.decompress)167 @bigmemtest(size=_4G, memuse=1)168 def test_large_bufsize(self, size):169 # Test decompress(bufsize) parameter greater than the internal limit170 data = HAMLET_SCENE * 10171 compressed = zlib.compress(data, 1)172 self.assertEqual(zlib.decompress(compressed, 15, size), data)173 def test_custom_bufsize(self):174 data = HAMLET_SCENE * 10175 compressed = zlib.compress(data, 1)176 self.assertEqual(zlib.decompress(compressed, 15, CustomInt()), data)177 @unittest.skipUnless(sys.maxsize > 2**32, 'requires 64bit platform')178 @bigmemtest(size=_4G + 100, memuse=4)179 def test_64bit_compress(self, size):180 data = b'x' * size181 try:182 comp = zlib.compress(data, 0)183 self.assertEqual(zlib.decompress(comp), data)184 finally:185 comp = data = None186class CompressObjectTestCase(BaseCompressTestCase, unittest.TestCase):187 # Test compression object188 def test_pair(self):189 # straightforward compress/decompress objects190 datasrc = HAMLET_SCENE * 128191 datazip = zlib.compress(datasrc)192 # should compress both bytes and bytearray data193 for data in (datasrc, bytearray(datasrc)):194 co = zlib.compressobj()195 x1 = co.compress(data)196 x2 = co.flush()197 self.assertRaises(zlib.error, 
co.flush) # second flush should not work198 self.assertEqual(x1 + x2, datazip)199 for v1, v2 in ((x1, x2), (bytearray(x1), bytearray(x2))):200 dco = zlib.decompressobj()201 y1 = dco.decompress(v1 + v2)202 y2 = dco.flush()203 self.assertEqual(data, y1 + y2)204 self.assertIsInstance(dco.unconsumed_tail, bytes)205 self.assertIsInstance(dco.unused_data, bytes)206 def test_keywords(self):207 level = 2208 method = zlib.DEFLATED209 wbits = -12210 memLevel = 9211 strategy = zlib.Z_FILTERED212 co = zlib.compressobj(level=level,213 method=method,214 wbits=wbits,215 memLevel=memLevel,216 strategy=strategy,217 zdict=b"")218 do = zlib.decompressobj(wbits=wbits, zdict=b"")219 with self.assertRaises(TypeError):220 co.compress(data=HAMLET_SCENE)221 with self.assertRaises(TypeError):222 do.decompress(data=zlib.compress(HAMLET_SCENE))223 x = co.compress(HAMLET_SCENE) + co.flush()224 y = do.decompress(x, max_length=len(HAMLET_SCENE)) + do.flush()225 self.assertEqual(HAMLET_SCENE, y)226 def test_compressoptions(self):227 # specify lots of options to compressobj()228 level = 2229 method = zlib.DEFLATED230 wbits = -12231 memLevel = 9232 strategy = zlib.Z_FILTERED233 co = zlib.compressobj(level, method, wbits, memLevel, strategy)234 x1 = co.compress(HAMLET_SCENE)235 x2 = co.flush()236 dco = zlib.decompressobj(wbits)237 y1 = dco.decompress(x1 + x2)238 y2 = dco.flush()239 self.assertEqual(HAMLET_SCENE, y1 + y2)240 def test_compressincremental(self):241 # compress object in steps, decompress object as one-shot242 data = HAMLET_SCENE * 128243 co = zlib.compressobj()244 bufs = []245 for i in range(0, len(data), 256):246 bufs.append(co.compress(data[i:i+256]))247 bufs.append(co.flush())248 combuf = b''.join(bufs)249 dco = zlib.decompressobj()250 y1 = dco.decompress(b''.join(bufs))251 y2 = dco.flush()252 self.assertEqual(data, y1 + y2)253 def test_decompinc(self, flush=False, source=None, cx=256, dcx=64):254 # compress object in steps, decompress object in steps255 source = source or 
HAMLET_SCENE256 data = source * 128257 co = zlib.compressobj()258 bufs = []259 for i in range(0, len(data), cx):260 bufs.append(co.compress(data[i:i+cx]))261 bufs.append(co.flush())262 combuf = b''.join(bufs)263 decombuf = zlib.decompress(combuf)264 # Test type of return value265 self.assertIsInstance(decombuf, bytes)266 self.assertEqual(data, decombuf)267 dco = zlib.decompressobj()268 bufs = []269 for i in range(0, len(combuf), dcx):270 bufs.append(dco.decompress(combuf[i:i+dcx]))271 self.assertEqual(b'', dco.unconsumed_tail, ########272 "(A) uct should be b'': not %d long" %273 len(dco.unconsumed_tail))274 self.assertEqual(b'', dco.unused_data)275 if flush:276 bufs.append(dco.flush())277 else:278 while True:279 chunk = dco.decompress(b'')280 if chunk:281 bufs.append(chunk)282 else:283 break284 self.assertEqual(b'', dco.unconsumed_tail, ########285 "(B) uct should be b'': not %d long" %286 len(dco.unconsumed_tail))287 self.assertEqual(b'', dco.unused_data)288 self.assertEqual(data, b''.join(bufs))289 # Failure means: "decompressobj with init options failed"290 def test_decompincflush(self):291 self.test_decompinc(flush=True)292 def test_decompimax(self, source=None, cx=256, dcx=64):293 # compress in steps, decompress in length-restricted steps294 source = source or HAMLET_SCENE295 # Check a decompression object with max_length specified296 data = source * 128297 co = zlib.compressobj()298 bufs = []299 for i in range(0, len(data), cx):300 bufs.append(co.compress(data[i:i+cx]))301 bufs.append(co.flush())302 combuf = b''.join(bufs)303 self.assertEqual(data, zlib.decompress(combuf),304 'compressed data failure')305 dco = zlib.decompressobj()306 bufs = []307 cb = combuf308 while cb:309 #max_length = 1 + len(cb)//10310 chunk = dco.decompress(cb, dcx)311 self.assertFalse(len(chunk) > dcx,312 'chunk too big (%d>%d)' % (len(chunk), dcx))313 bufs.append(chunk)314 cb = dco.unconsumed_tail315 bufs.append(dco.flush())316 self.assertEqual(data, b''.join(bufs), 'Wrong data 
retrieved')317 def test_decompressmaxlen(self, flush=False):318 # Check a decompression object with max_length specified319 data = HAMLET_SCENE * 128320 co = zlib.compressobj()321 bufs = []322 for i in range(0, len(data), 256):323 bufs.append(co.compress(data[i:i+256]))324 bufs.append(co.flush())325 combuf = b''.join(bufs)326 self.assertEqual(data, zlib.decompress(combuf),327 'compressed data failure')328 dco = zlib.decompressobj()329 bufs = []330 cb = combuf331 while cb:332 max_length = 1 + len(cb)//10333 chunk = dco.decompress(cb, max_length)334 self.assertFalse(len(chunk) > max_length,335 'chunk too big (%d>%d)' % (len(chunk),max_length))336 bufs.append(chunk)337 cb = dco.unconsumed_tail338 if flush:339 bufs.append(dco.flush())340 else:341 while chunk:342 chunk = dco.decompress(b'', max_length)343 self.assertFalse(len(chunk) > max_length,344 'chunk too big (%d>%d)' % (len(chunk),max_length))345 bufs.append(chunk)346 self.assertEqual(data, b''.join(bufs), 'Wrong data retrieved')347 def test_decompressmaxlenflush(self):348 self.test_decompressmaxlen(flush=True)349 def test_maxlenmisc(self):350 # Misc tests of max_length351 dco = zlib.decompressobj()352 self.assertRaises(ValueError, dco.decompress, b"", -1)353 self.assertEqual(b'', dco.unconsumed_tail)354 def test_maxlen_large(self):355 # Sizes up to sys.maxsize should be accepted, although zlib is356 # internally limited to expressing sizes with unsigned int357 data = HAMLET_SCENE * 10358 self.assertGreater(len(data), zlib.DEF_BUF_SIZE)359 compressed = zlib.compress(data, 1)360 dco = zlib.decompressobj()361 self.assertEqual(dco.decompress(compressed, sys.maxsize), data)362 def test_maxlen_custom(self):363 data = HAMLET_SCENE * 10364 compressed = zlib.compress(data, 1)365 dco = zlib.decompressobj()366 self.assertEqual(dco.decompress(compressed, CustomInt()), data[:100])367 def test_clear_unconsumed_tail(self):368 # Issue #12050: calling decompress() without providing max_length369 # should clear the unconsumed_tail 
attribute.370 cdata = b"x\x9cKLJ\x06\x00\x02M\x01" # "abc"371 dco = zlib.decompressobj()372 ddata = dco.decompress(cdata, 1)373 ddata += dco.decompress(dco.unconsumed_tail)374 self.assertEqual(dco.unconsumed_tail, b"")375 def test_flushes(self):376 # Test flush() with the various options, using all the377 # different levels in order to provide more variations.378 sync_opt = ['Z_NO_FLUSH', 'Z_SYNC_FLUSH', 'Z_FULL_FLUSH',379 'Z_PARTIAL_FLUSH', 'Z_BLOCK']380 sync_opt = [getattr(zlib, opt) for opt in sync_opt381 if hasattr(zlib, opt)]382 data = HAMLET_SCENE * 8383 for sync in sync_opt:384 for level in range(10):385 obj = zlib.compressobj( level )386 a = obj.compress( data[:3000] )387 b = obj.flush( sync )388 c = obj.compress( data[3000:] )389 d = obj.flush()390 self.assertEqual(zlib.decompress(b''.join([a,b,c,d])),391 data, ("Decompress failed: flush "392 "mode=%i, level=%i") % (sync, level))393 del obj394 @unittest.skipUnless(hasattr(zlib, 'Z_SYNC_FLUSH'),395 'requires zlib.Z_SYNC_FLUSH')396 def test_odd_flush(self):397 # Test for odd flushing bugs noted in 2.0, and hopefully fixed in 2.1398 import random399 # Testing on 17K of "random" data400 # Create compressor and decompressor objects401 co = zlib.compressobj(zlib.Z_BEST_COMPRESSION)402 dco = zlib.decompressobj()403 # Try 17K of data404 # generate random data stream405 try:406 # In 2.3 and later, WichmannHill is the RNG of the bug report407 gen = random.WichmannHill()408 except AttributeError:409 try:410 # 2.2 called it Random411 gen = random.Random()412 except AttributeError:413 # others might simply have a single RNG414 gen = random415 gen.seed(1)416 data = genblock(1, 17 * 1024, generator=gen)417 # compress, sync-flush, and decompress418 first = co.compress(data)419 second = co.flush(zlib.Z_SYNC_FLUSH)420 expanded = dco.decompress(first + second)421 # if decompressed data is different from the input data, choke.422 self.assertEqual(expanded, data, "17K random source doesn't match")423 def 
test_empty_flush(self):424 # Test that calling .flush() on unused objects works.425 # (Bug #1083110 -- calling .flush() on decompress objects426 # caused a core dump.)427 co = zlib.compressobj(zlib.Z_BEST_COMPRESSION)428 self.assertTrue(co.flush()) # Returns a zlib header429 dco = zlib.decompressobj()430 self.assertEqual(dco.flush(), b"") # Returns nothing431 def test_dictionary(self):432 h = HAMLET_SCENE433 # Build a simulated dictionary out of the words in HAMLET.434 words = h.split()435 random.shuffle(words)436 zdict = b''.join(words)437 # Use it to compress HAMLET.438 co = zlib.compressobj(zdict=zdict)439 cd = co.compress(h) + co.flush()440 # Verify that it will decompress with the dictionary.441 dco = zlib.decompressobj(zdict=zdict)442 self.assertEqual(dco.decompress(cd) + dco.flush(), h)443 # Verify that it fails when not given the dictionary.444 dco = zlib.decompressobj()445 self.assertRaises(zlib.error, dco.decompress, cd)446 def test_dictionary_streaming(self):447 # This simulates the reuse of a compressor object for compressing448 # several separate data streams.449 co = zlib.compressobj(zdict=HAMLET_SCENE)450 do = zlib.decompressobj(zdict=HAMLET_SCENE)451 piece = HAMLET_SCENE[1000:1500]452 d0 = co.compress(piece) + co.flush(zlib.Z_SYNC_FLUSH)453 d1 = co.compress(piece[100:]) + co.flush(zlib.Z_SYNC_FLUSH)454 d2 = co.compress(piece[:-100]) + co.flush(zlib.Z_SYNC_FLUSH)455 self.assertEqual(do.decompress(d0), piece)456 self.assertEqual(do.decompress(d1), piece[100:])457 self.assertEqual(do.decompress(d2), piece[:-100])458 def test_decompress_incomplete_stream(self):459 # This is 'foo', deflated460 x = b'x\x9cK\xcb\xcf\x07\x00\x02\x82\x01E'461 # For the record462 self.assertEqual(zlib.decompress(x), b'foo')463 self.assertRaises(zlib.error, zlib.decompress, x[:-5])464 # Omitting the stream end works with decompressor objects465 # (see issue #8672).466 dco = zlib.decompressobj()467 y = dco.decompress(x[:-5])468 y += dco.flush()469 self.assertEqual(y, b'foo')470 
def test_decompress_eof(self):471 x = b'x\x9cK\xcb\xcf\x07\x00\x02\x82\x01E' # 'foo'472 dco = zlib.decompressobj()473 self.assertFalse(dco.eof)474 dco.decompress(x[:-5])475 self.assertFalse(dco.eof)476 dco.decompress(x[-5:])477 self.assertTrue(dco.eof)478 dco.flush()479 self.assertTrue(dco.eof)480 def test_decompress_eof_incomplete_stream(self):481 x = b'x\x9cK\xcb\xcf\x07\x00\x02\x82\x01E' # 'foo'482 dco = zlib.decompressobj()483 self.assertFalse(dco.eof)484 dco.decompress(x[:-5])485 self.assertFalse(dco.eof)486 dco.flush()487 self.assertFalse(dco.eof)488 def test_decompress_unused_data(self):489 # Repeated calls to decompress() after EOF should accumulate data in490 # dco.unused_data, instead of just storing the arg to the last call.491 source = b'abcdefghijklmnopqrstuvwxyz'492 remainder = b'0123456789'493 y = zlib.compress(source)494 x = y + remainder495 for maxlen in 0, 1000:496 for step in 1, 2, len(y), len(x):497 dco = zlib.decompressobj()498 data = b''499 for i in range(0, len(x), step):500 if i < len(y):501 self.assertEqual(dco.unused_data, b'')502 if maxlen == 0:503 data += dco.decompress(x[i : i + step])504 self.assertEqual(dco.unconsumed_tail, b'')505 else:506 data += dco.decompress(507 dco.unconsumed_tail + x[i : i + step], maxlen)508 data += dco.flush()509 self.assertTrue(dco.eof)510 self.assertEqual(data, source)511 self.assertEqual(dco.unconsumed_tail, b'')512 self.assertEqual(dco.unused_data, remainder)513 # issue27164514 def test_decompress_raw_with_dictionary(self):515 zdict = b'abcdefghijklmnopqrstuvwxyz'516 co = zlib.compressobj(wbits=-zlib.MAX_WBITS, zdict=zdict)517 comp = co.compress(zdict) + co.flush()518 dco = zlib.decompressobj(wbits=-zlib.MAX_WBITS, zdict=zdict)519 uncomp = dco.decompress(comp) + dco.flush()520 self.assertEqual(zdict, uncomp)521 def test_flush_with_freed_input(self):522 # Issue #16411: decompressor accesses input to last decompress() call523 # in flush(), even if this object has been freed in the meanwhile.524 input1 = 
b'abcdefghijklmnopqrstuvwxyz'525 input2 = b'QWERTYUIOPASDFGHJKLZXCVBNM'526 data = zlib.compress(input1)527 dco = zlib.decompressobj()528 dco.decompress(data, 1)529 del data530 data = zlib.compress(input2)531 self.assertEqual(dco.flush(), input1[1:])532 @bigmemtest(size=_4G, memuse=1)533 def test_flush_large_length(self, size):534 # Test flush(length) parameter greater than internal limit UINT_MAX535 input = HAMLET_SCENE * 10536 data = zlib.compress(input, 1)537 dco = zlib.decompressobj()538 dco.decompress(data, 1)539 self.assertEqual(dco.flush(size), input[1:])540 def test_flush_custom_length(self):541 input = HAMLET_SCENE * 10542 data = zlib.compress(input, 1)543 dco = zlib.decompressobj()544 dco.decompress(data, 1)545 self.assertEqual(dco.flush(CustomInt()), input[1:])546 @requires_Compress_copy547 def test_compresscopy(self):548 # Test copying a compression object549 data0 = HAMLET_SCENE550 data1 = bytes(str(HAMLET_SCENE, "ascii").swapcase(), "ascii")551 c0 = zlib.compressobj(zlib.Z_BEST_COMPRESSION)552 bufs0 = []553 bufs0.append(c0.compress(data0))554 c1 = c0.copy()555 bufs1 = bufs0[:]556 bufs0.append(c0.compress(data0))557 bufs0.append(c0.flush())558 s0 = b''.join(bufs0)559 bufs1.append(c1.compress(data1))560 bufs1.append(c1.flush())561 s1 = b''.join(bufs1)562 self.assertEqual(zlib.decompress(s0),data0+data0)563 self.assertEqual(zlib.decompress(s1),data0+data1)564 @requires_Compress_copy565 def test_badcompresscopy(self):566 # Test copying a compression object in an inconsistent state567 c = zlib.compressobj()568 c.compress(HAMLET_SCENE)569 c.flush()570 self.assertRaises(ValueError, c.copy)571 @requires_Decompress_copy572 def test_decompresscopy(self):573 # Test copying a decompression object574 data = HAMLET_SCENE575 comp = zlib.compress(data)576 # Test type of return value577 self.assertIsInstance(comp, bytes)578 d0 = zlib.decompressobj()579 bufs0 = []580 bufs0.append(d0.decompress(comp[:32]))581 d1 = d0.copy()582 bufs1 = bufs0[:]583 
bufs0.append(d0.decompress(comp[32:]))584 s0 = b''.join(bufs0)585 bufs1.append(d1.decompress(comp[32:]))586 s1 = b''.join(bufs1)587 self.assertEqual(s0,s1)588 self.assertEqual(s0,data)589 @requires_Decompress_copy590 def test_baddecompresscopy(self):591 # Test copying a compression object in an inconsistent state592 data = zlib.compress(HAMLET_SCENE)593 d = zlib.decompressobj()594 d.decompress(data)595 d.flush()596 self.assertRaises(ValueError, d.copy)597 def test_compresspickle(self):598 for proto in range(pickle.HIGHEST_PROTOCOL + 1):599 with self.assertRaises((TypeError, pickle.PicklingError)):600 pickle.dumps(zlib.compressobj(zlib.Z_BEST_COMPRESSION), proto)601 def test_decompresspickle(self):602 for proto in range(pickle.HIGHEST_PROTOCOL + 1):603 with self.assertRaises((TypeError, pickle.PicklingError)):604 pickle.dumps(zlib.decompressobj(), proto)605 # Memory use of the following functions takes into account overallocation606 @bigmemtest(size=_1G + 1024 * 1024, memuse=3)607 def test_big_compress_buffer(self, size):608 c = zlib.compressobj(1)609 compress = lambda s: c.compress(s) + c.flush()610 self.check_big_compress_buffer(size, compress)611 @bigmemtest(size=_1G + 1024 * 1024, memuse=2)612 def test_big_decompress_buffer(self, size):613 d = zlib.decompressobj()614 decompress = lambda s: d.decompress(s) + d.flush()615 self.check_big_decompress_buffer(size, decompress)616 @unittest.skipUnless(sys.maxsize > 2**32, 'requires 64bit platform')617 @bigmemtest(size=_4G + 100, memuse=4)618 def test_64bit_compress(self, size):619 data = b'x' * size620 co = zlib.compressobj(0)621 do = zlib.decompressobj()622 try:623 comp = co.compress(data) + co.flush()624 uncomp = do.decompress(comp) + do.flush()625 self.assertEqual(uncomp, data)626 finally:627 comp = uncomp = data = None628 @unittest.skipUnless(sys.maxsize > 2**32, 'requires 64bit platform')629 @bigmemtest(size=_4G + 100, memuse=3)630 def test_large_unused_data(self, size):631 data = b'abcdefghijklmnop'632 unused = 
b'x' * size633 comp = zlib.compress(data) + unused634 do = zlib.decompressobj()635 try:636 uncomp = do.decompress(comp) + do.flush()637 self.assertEqual(unused, do.unused_data)638 self.assertEqual(uncomp, data)639 finally:640 unused = comp = do = None641 @unittest.skipUnless(sys.maxsize > 2**32, 'requires 64bit platform')642 @bigmemtest(size=_4G + 100, memuse=5)643 def test_large_unconsumed_tail(self, size):644 data = b'x' * size645 do = zlib.decompressobj()646 try:647 comp = zlib.compress(data, 0)648 uncomp = do.decompress(comp, 1) + do.flush()649 self.assertEqual(uncomp, data)650 self.assertEqual(do.unconsumed_tail, b'')651 finally:652 comp = uncomp = data = None653 def test_wbits(self):654 # wbits=0 only supported since zlib v1.2.3.5655 # Register "1.2.3" as "1.2.3.0"656 # or "1.2.0-linux","1.2.0.f","1.2.0.f-linux"657 v = zlib.ZLIB_RUNTIME_VERSION.split('-', 1)[0].split('.')658 if len(v) < 4:659 v.append('0')660 elif not v[-1].isnumeric():661 v[-1] = '0'662 v = tuple(map(int, v))663 supports_wbits_0 = v >= (1, 2, 3, 5)664 co = zlib.compressobj(level=1, wbits=15)665 zlib15 = co.compress(HAMLET_SCENE) + co.flush()666 self.assertEqual(zlib.decompress(zlib15, 15), HAMLET_SCENE)667 if supports_wbits_0:668 self.assertEqual(zlib.decompress(zlib15, 0), HAMLET_SCENE)669 self.assertEqual(zlib.decompress(zlib15, 32 + 15), HAMLET_SCENE)670 with self.assertRaisesRegex(zlib.error, 'invalid window size'):671 zlib.decompress(zlib15, 14)672 dco = zlib.decompressobj(wbits=32 + 15)673 self.assertEqual(dco.decompress(zlib15), HAMLET_SCENE)674 dco = zlib.decompressobj(wbits=14)675 with self.assertRaisesRegex(zlib.error, 'invalid window size'):676 dco.decompress(zlib15)677 co = zlib.compressobj(level=1, wbits=9)678 zlib9 = co.compress(HAMLET_SCENE) + co.flush()679 self.assertEqual(zlib.decompress(zlib9, 9), HAMLET_SCENE)680 self.assertEqual(zlib.decompress(zlib9, 15), HAMLET_SCENE)681 if supports_wbits_0:682 self.assertEqual(zlib.decompress(zlib9, 0), HAMLET_SCENE)683 
self.assertEqual(zlib.decompress(zlib9, 32 + 9), HAMLET_SCENE)684 dco = zlib.decompressobj(wbits=32 + 9)685 self.assertEqual(dco.decompress(zlib9), HAMLET_SCENE)686 co = zlib.compressobj(level=1, wbits=-15)687 deflate15 = co.compress(HAMLET_SCENE) + co.flush()688 self.assertEqual(zlib.decompress(deflate15, -15), HAMLET_SCENE)689 dco = zlib.decompressobj(wbits=-15)690 self.assertEqual(dco.decompress(deflate15), HAMLET_SCENE)691 co = zlib.compressobj(level=1, wbits=-9)692 deflate9 = co.compress(HAMLET_SCENE) + co.flush()693 self.assertEqual(zlib.decompress(deflate9, -9), HAMLET_SCENE)694 self.assertEqual(zlib.decompress(deflate9, -15), HAMLET_SCENE)695 dco = zlib.decompressobj(wbits=-9)696 self.assertEqual(dco.decompress(deflate9), HAMLET_SCENE)697 co = zlib.compressobj(level=1, wbits=16 + 15)698 gzip = co.compress(HAMLET_SCENE) + co.flush()699 self.assertEqual(zlib.decompress(gzip, 16 + 15), HAMLET_SCENE)700 self.assertEqual(zlib.decompress(gzip, 32 + 15), HAMLET_SCENE)701 dco = zlib.decompressobj(32 + 15)702 self.assertEqual(dco.decompress(gzip), HAMLET_SCENE)703def genblock(seed, length, step=1024, generator=random):704 """length-byte stream of random data from a seed (in step-byte blocks)."""705 if seed is not None:706 generator.seed(seed)707 randint = generator.randint708 if length < step or step < 2:709 step = length710 blocks = bytes()711 for i in range(0, length, step):712 blocks += bytes(randint(0, 255) for x in range(step))713 return blocks714def choose_lines(source, number, seed=None, generator=random):715 """Return a list of number lines randomly chosen from the source"""716 if seed is not None:...

Full Screen

Full Screen

test-usertiming-decompression.js

Source:test-usertiming-decompression.js Github

copy

Full Screen

1/* eslint-env node, mocha */2(function(root) {3 "use strict";4 //5 // Run in either Mocha, Karma or Browser environments6 //7 if (typeof root === "undefined") {8 root = {};9 }10 var utd = root.UserTimingDecompression ?11 root.UserTimingDecompression :12 require("../dist/usertiming-decompression");13 var chai = root.chai ? root.chai : require("chai");14 var expect = chai.expect;15 //16 // UserTimingDecompression17 //18 describe("UserTimingDecompression", function() {19 //20 // decompressFromString21 //22 describe(".decompressFromString()", function() {23 it("should return an empty list for an empty string", function() {24 expect(utd.decompressFromString("")).to.deep.equal([]);25 });26 it("should return an empty list for non-objects/strings", function() {27 expect(utd.decompressFromString()).to.deep.equal([]);28 expect(utd.decompressFromString(null)).to.deep.equal([]);29 expect(utd.decompressFromString(1)).to.deep.equal([]);30 expect(utd.decompressFromString(0)).to.deep.equal([]);31 expect(utd.decompressFromString({})).to.deep.equal([]);32 expect(utd.decompressFromString(undefined)).to.deep.equal([]);33 expect(utd.decompressFromString(true)).to.deep.equal([]);34 expect(utd.decompressFromString(false)).to.deep.equal([]);35 });36 it("should return a single mark for an array-encoded URI", function() {37 expect(utd.decompressFromString("0mark1~1")).to.deep.equal([{38 name: "mark1",39 entryType: "mark",40 duration: 0,41 startTime: 142 }]);43 });44 it("should return a single mark for an trie-encoded URI", function() {45 expect(utd.decompressFromString("('mark1'~'1')")).to.deep.equal([{46 name: "mark1",47 entryType: "mark",48 duration: 0,49 startTime: 150 }]);51 });52 it("should return a single measure for an trie-encoded URI", function() {53 expect(utd.decompressFromString("('mark1'~'1_a')")).to.deep.equal([{54 name: "mark1",55 entryType: "measure",56 startTime: 1,57 duration: 1058 }]);59 });60 });61 //62 // decompressUriTrie63 //64 describe(".decompressUriTrie()", 
function() {65 it("should return an empty list for an empty object", function() {66 expect(utd.decompressUriTrie({})).to.deep.equal([]);67 });68 it("should return an empty list for non-objects/strings", function() {69 expect(utd.decompressUriTrie()).to.deep.equal([]);70 expect(utd.decompressUriTrie(null)).to.deep.equal([]);71 expect(utd.decompressUriTrie(1)).to.deep.equal([]);72 expect(utd.decompressUriTrie(0)).to.deep.equal([]);73 expect(utd.decompressUriTrie("")).to.deep.equal([]);74 expect(utd.decompressUriTrie(undefined)).to.deep.equal([]);75 expect(utd.decompressUriTrie(true)).to.deep.equal([]);76 expect(utd.decompressUriTrie(false)).to.deep.equal([]);77 });78 it("should return an empty object for strings that aren't valid uri-JSON", function() {79 expect(utd.decompressUriTrie("a")).to.deep.equal([]);80 expect(utd.decompressUriTrie("a:/")).to.deep.equal([]);81 expect(utd.decompressUriTrie("{a:~")).to.deep.equal([]);82 expect(utd.decompressUriTrie("b")).to.deep.equal([]);83 });84 it("should return UserTimings for a simple mark object-JSON", function() {85 expect(utd.decompressUriTrie({ "a": "1" })).to.deep.equal([{86 name: "a",87 entryType: "mark",88 duration: 0,89 startTime: 190 }]);91 });92 it("should return UserTimings for a simple mark object-JSON URI-encoded", function() {93 expect(utd.decompressUriTrie("('a'~'1')")).to.deep.equal([{94 name: "a",95 entryType: "mark",96 duration: 0,97 startTime: 198 }]);99 });100 it("should return UserTimings for a simple mark JSURL encoded", function() {101 expect(utd.decompressUriTrie("~(a~'1)")).to.deep.equal([{102 name: "a",103 entryType: "mark",104 duration: 0,105 startTime: 1106 }]);107 });108 it("should return UserTimings for a simple measure JSURL encoded", function() {109 expect(utd.decompressUriTrie("~(a~'1_a)")).to.deep.equal([{110 name: "a",111 entryType: "measure",112 startTime: 1,113 duration: 10114 }]);115 });116 });117 //118 // decompressUriArray119 //120 describe(".decompressUriArray()", function() {121 
it("should return an empty array for non-strings", function() {122 expect(utd.decompressUriArray()).to.deep.equal([]);123 expect(utd.decompressUriArray(null)).to.deep.equal([]);124 expect(utd.decompressUriArray(1)).to.deep.equal([]);125 expect(utd.decompressUriArray(0)).to.deep.equal([]);126 expect(utd.decompressUriArray([])).to.deep.equal([]);127 expect(utd.decompressUriArray(undefined)).to.deep.equal([]);128 expect(utd.decompressUriArray(true)).to.deep.equal([]);129 expect(utd.decompressUriArray(false)).to.deep.equal([]);130 expect(utd.decompressUriArray([])).to.deep.equal([]);131 });132 it("should decompress a the value 'mark1~1'", function() {133 expect(utd.decompressUriArray("mark1~1")).to.deep.equal([{134 name: "mark1",135 entryType: "mark",136 duration: 0,137 startTime: 1138 }]);139 });140 it("should decompress a the value 'mark1~10'", function() {141 expect(utd.decompressUriArray("mark1~10")).to.deep.equal([{142 name: "mark1",143 entryType: "mark",144 duration: 0,145 startTime: 36146 }]);147 });148 it("should decompress a the value 'mark1~a'", function() {149 expect(utd.decompressUriArray("mark1~a")).to.deep.equal([{150 name: "mark1",151 entryType: "mark",152 duration: 0,153 startTime: 10154 }]);155 });156 it("should decompress a the value 'measure~a_1'", function() {157 expect(utd.decompressUriArray("measure~a_1")).to.deep.equal([{158 name: "measure",159 entryType: "measure",160 startTime: 10,161 duration: 1162 }]);163 });164 it("should decompress a the value 'measure~1_a'", function() {165 expect(utd.decompressUriArray("measure~1_a")).to.deep.equal([{166 name: "measure",167 entryType: "measure",168 startTime: 1,169 duration: 10170 }]);171 });172 it("should decompress a the value 'mark1~1*'", function() {173 expect(utd.decompressUriArray("mark1~1*")).to.deep.equal([{174 name: "mark1",175 entryType: "mark",176 duration: 0,177 startTime: 1178 },179 {180 name: "mark1",181 entryType: "mark",182 duration: 0,183 startTime: 2184 }]);185 });186 it("should 
decompress a the value 'mark1~1*2'", function() {187 expect(utd.decompressUriArray("mark1~1*2")).to.deep.equal([{188 name: "mark1",189 entryType: "mark",190 duration: 0,191 startTime: 1192 },193 {194 name: "mark1",195 entryType: "mark",196 duration: 0,197 startTime: 2198 }]);199 });200 it("should decompress a the value 'mark1~1.2'", function() {201 expect(utd.decompressUriArray("mark1~1.2")).to.deep.equal([{202 name: "mark1",203 entryType: "mark",204 duration: 0,205 startTime: 1206 },207 {208 name: "mark1",209 entryType: "mark",210 duration: 0,211 startTime: 3212 }]);213 });214 it("should decompress the value 'mark1~1.2.'", function() {215 expect(utd.decompressUriArray("mark1~1.2.")).to.deep.equal([{216 name: "mark1",217 entryType: "mark",218 duration: 0,219 startTime: 1220 },221 {222 name: "mark1",223 entryType: "mark",224 duration: 0,225 startTime: 3226 },227 {228 name: "mark1",229 entryType: "mark",230 duration: 0,231 startTime: 3232 }]);233 });234 it("should decompress a the value 'mark1~1.2*3.3*.", function() {235 expect(utd.decompressUriArray("mark1~1.2*3.3*.")).to.deep236 .equal([{237 name: "mark1",238 entryType: "mark",239 duration: 0,240 startTime: 1241 },242 {243 name: "mark1",244 entryType: "mark",245 duration: 0,246 startTime: 3247 },248 {249 name: "mark1",250 entryType: "mark",251 duration: 0,252 startTime: 5253 },254 {255 name: "mark1",256 entryType: "mark",257 duration: 0,258 startTime: 7259 },260 {261 name: "mark1",262 entryType: "mark",263 duration: 0,264 startTime: 10265 },266 {267 name: "mark1",268 entryType: "mark",269 duration: 0,270 startTime: 13271 },272 {273 name: "mark1",274 entryType: "mark",275 duration: 0,276 startTime: 13277 }]);278 });279 it("should decompress a the value 'mark1~1~mark2~2", function() {280 expect(utd.decompressUriArray("mark1~1~mark2~2")).to.deep281 .equal([{282 name: "mark1",283 entryType: "mark",284 duration: 0,285 startTime: 1286 },287 {288 name: "mark2",289 entryType: "mark",290 duration: 0,291 startTime: 2292 
}]);293 });294 it("should decompress a the value 'mark1~1~mark2~2~mark3~3", function() {295 expect(utd.decompressUriArray("mark1~1~mark2~2~mark3~3")).to.deep296 .equal([{297 name: "mark1",298 entryType: "mark",299 duration: 0,300 startTime: 1301 },302 {303 name: "mark2",304 entryType: "mark",305 duration: 0,306 startTime: 2307 },308 {309 name: "mark3",310 entryType: "mark",311 duration: 0,312 startTime: 3313 }]);314 });315 it("should decompress a the value 'mark1~1~measure2~2_a", function() {316 expect(utd.decompressUriArray("mark1~1~measure2~2_a")).to.deep317 .equal([{318 name: "mark1",319 entryType: "mark",320 duration: 0,321 startTime: 1322 },323 {324 name: "measure2",325 entryType: "measure",326 startTime: 2,327 duration: 10328 }]);329 });330 });331 //332 // generateUserTimings333 //334 describe(".generateUserTimings()", function() {335 it("should return an empty list when given non-string parameters", function() {336 expect(utd.generateUserTimings()).to.deep.equal([]);337 expect(utd.generateUserTimings(null)).to.deep.equal([]);338 expect(utd.generateUserTimings(0)).to.deep.equal([]);339 expect(utd.generateUserTimings(1)).to.deep.equal([]);340 expect(utd.generateUserTimings(false)).to.deep.equal([]);341 expect(utd.generateUserTimings(true)).to.deep.equal([]);342 expect(utd.generateUserTimings(undefined)).to.deep.equal([]);343 expect(utd.generateUserTimings({})).to.deep.equal([]);344 expect(utd.generateUserTimings([])).to.deep.equal([]);345 expect(utd.generateUserTimings("a", null)).to.deep.equal([]);346 expect(utd.generateUserTimings("a", false)).to.deep.equal([]);347 expect(utd.generateUserTimings("a", true)).to.deep.equal([]);348 expect(utd.generateUserTimings("a", undefined)).to.deep.equal([]);349 expect(utd.generateUserTimings("a", {})).to.deep.equal([]);350 expect(utd.generateUserTimings("a", [])).to.deep.equal([]);351 });352 it("should return an single mark when given an array with a single timing", function() {353 expect(utd.generateUserTimings("mark", 
"a")).to.deep.equal([{354 name: "mark",355 entryType: "mark",356 duration: 0,357 startTime: 10358 }]);359 });360 it("should return an single mark when given an array with a single timing that is numeric", function() {361 expect(utd.generateUserTimings("mark", 1)).to.deep.equal([{362 name: "mark",363 entryType: "mark",364 duration: 0,365 startTime: 1366 }]);367 });368 it("should return an single mark when given an array with a large timing that is numeric", function() {369 expect(utd.generateUserTimings("mark", 123)).to.deep.equal([{370 name: "mark",371 entryType: "mark",372 duration: 0,373 startTime: 1371374 }]);375 });376 it("should return an single measure when given an array with a single timing", function() {377 expect(utd.generateUserTimings("measure", "a_a")).to.deep.equal([{378 name: "measure",379 entryType: "measure",380 startTime: 10,381 duration: 10382 }]);383 });384 it("should return two elements when given an array with a two timings", function() {385 expect(utd.generateUserTimings("mark", "a*")).to.deep.equal([{386 name: "mark",387 entryType: "mark",388 duration: 0,389 startTime: 10390 },391 {392 name: "mark",393 entryType: "mark",394 duration: 0,395 startTime: 20396 }]);397 });398 it("should return three elements when given an array with a three timings", function() {399 expect(utd.generateUserTimings("mark", "a*3")).to.deep.equal([{400 name: "mark",401 entryType: "mark",402 duration: 0,403 startTime: 10404 },405 {406 name: "mark",407 entryType: "mark",408 duration: 0,409 startTime: 20410 },411 {412 name: "mark",413 entryType: "mark",414 duration: 0,415 startTime: 30416 }]);417 });418 it("should return three elements when given an array with a three timings (non-repeating)", function() {419 expect(utd.generateUserTimings("mark", "a.b.c")).to.deep.equal([{420 name: "mark",421 entryType: "mark",422 duration: 0,423 startTime: 10424 },425 {426 name: "mark",427 entryType: "mark",428 duration: 0,429 startTime: 21430 },431 {432 name: "mark",433 entryType: 
"mark",434 duration: 0,435 startTime: 33436 }]);437 });438 it("should return three elements when given an array with a three timings (non-repeating)", function() {439 expect(utd.generateUserTimings("measure", "a_a.b_b.c_c")).to.deep.equal([{440 name: "measure",441 entryType: "measure",442 startTime: 10,443 duration: 10444 },445 {446 name: "measure",447 entryType: "measure",448 startTime: 21,449 duration: 11450 },451 {452 name: "measure",453 entryType: "measure",454 startTime: 33,455 duration: 12456 }]);457 });458 });459 //460 // decompressArray461 //462 describe(".decompressArray()", function() {463 it("should return an empty array for non-strings", function() {464 expect(utd.decompressArray()).to.deep.equal([]);465 expect(utd.decompressArray(null)).to.deep.equal([]);466 expect(utd.decompressArray(1)).to.deep.equal([]);467 expect(utd.decompressArray(0)).to.deep.equal([]);468 expect(utd.decompressArray({})).to.deep.equal([]);469 expect(utd.decompressArray(undefined)).to.deep.equal([]);470 expect(utd.decompressArray(true)).to.deep.equal([]);471 expect(utd.decompressArray(false)).to.deep.equal([]);472 });473 it("should decompress a the value '1'", function() {474 expect(utd.decompressArray("1")).to.deep.equal(["1"]);475 });476 it("should decompress a the value '10'", function() {477 expect(utd.decompressArray("10")).to.deep.equal(["10"]);478 });479 it("should decompress a the value 'a'", function() {480 expect(utd.decompressArray("a")).to.deep.equal(["a"]);481 });482 it("should decompress a the value '1*'", function() {483 expect(utd.decompressArray("1*")).to.deep.equal(["1", "1"]);484 });485 it("should decompress a the value '1*2'", function() {486 expect(utd.decompressArray("1*2")).to.deep.equal(["1", "1"]);487 });488 it("should decompress a the value '1.2'", function() {489 expect(utd.decompressArray("1.2")).to.deep.equal(["1", "2"]);490 });491 it("should decompress a the value '1.2.'", function() {492 expect(utd.decompressArray("1.2.")).to.deep.equal(["1", "2", 
"0"]);493 });494 it("should decompress a the value '1.2*3.3*.", function() {495 expect(utd.decompressArray("1.2*3.3*.")).to.deep.equal(["1", "2", "2", "2", "3", "3", "0"]);496 });497 it("should decompress a the value '1_a'", function() {498 expect(utd.decompressArray("1_a")).to.deep.equal(["1_a"]);499 });500 it("should decompress a the value '1_a*2'", function() {501 expect(utd.decompressArray("1_a*2")).to.deep.equal(["1_a", "1_a"]);502 });503 });504 //505 // decompressUserTiming506 //507 describe(".decompressUserTiming()", function() {508 it("should return an empty array for non-strings", function() {509 expect(utd.decompressUserTiming()).to.deep.equal([]);510 expect(utd.decompressUserTiming(null)).to.deep.equal([]);511 expect(utd.decompressUserTiming(1)).to.deep.equal([]);512 expect(utd.decompressUserTiming(0)).to.deep.equal([]);513 expect(utd.decompressUserTiming({})).to.deep.equal([]);514 expect(utd.decompressUserTiming(undefined)).to.deep.equal([]);515 expect(utd.decompressUserTiming(true)).to.deep.equal([]);516 expect(utd.decompressUserTiming(false)).to.deep.equal([]);517 });518 it("should return UserTimings marks for a simple object-JSON", function() {519 expect(utd.decompressUserTiming("{\"a\": \"1\"}")).to.deep.equal([{520 name: "a",521 startTime: 1,522 duration: 0,523 entryType: "mark"524 }]);525 });526 it("should return UserTimings marks for a simple object-JSON URI-encoded", function() {527 expect(utd.decompressUserTiming("('a'~'1')")).to.deep.equal([{528 name: "a",529 startTime: 1,530 duration: 0,531 entryType: "mark"532 }]);533 });534 it("should return UserTimings marks or a simple JSURL encoded", function() {535 expect(utd.decompressUserTiming("~(a~'1)")).to.deep.equal([{536 name: "a",537 startTime: 1,538 duration: 0,539 entryType: "mark"540 }]);541 });542 it("should return UserTimings measures for a simple object-JSON", function() {543 expect(utd.decompressUserTiming("{\"a\": \"1_2\"}")).to.deep.equal([{544 name: "a",545 startTime: 1,546 entryType: 
"measure",547 duration: 2548 }]);549 });550 it("should return UserTimings measures for a simple object-JSON URI-encoded", function() {551 expect(utd.decompressUserTiming("('a'~'1_2')")).to.deep.equal([{552 name: "a",553 startTime: 1,554 entryType: "measure",555 duration: 2556 }]);557 });558 it("should return UserTimings measures for a simple JSURL encoded", function() {559 expect(utd.decompressUserTiming("~(a~'1_2)")).to.deep.equal([{560 name: "a",561 startTime: 1,562 entryType: "measure",563 duration: 2564 }]);565 });566 it("should return UserTimings marks and measures for a simple object-JSON", function() {567 expect(utd.decompressUserTiming("{\"a\": \"1_2.1\"}")).to.deep.equal([{568 name: "a",569 startTime: 1,570 entryType: "measure",571 duration: 2572 },573 {574 name: "a",575 startTime: 2,576 duration: 0,577 entryType: "mark"578 }]);579 });580 it("should return UserTimings marks and measures for a simple object-JSON URI-encoded", function() {581 expect(utd.decompressUserTiming("('a'~'1_2.1')")).to.deep.equal([{582 name: "a",583 startTime: 1,584 entryType: "measure",585 duration: 2586 },587 {588 name: "a",589 startTime: 2,590 duration: 0,591 entryType: "mark"592 }]);593 });594 it("should return UserTimings marks and measures for a simple JSURL encoded", function() {595 expect(utd.decompressUserTiming("~(a~'1_2.1)")).to.deep.equal([{596 name: "a",597 startTime: 1,598 entryType: "measure",599 duration: 2600 },601 {602 name: "a",603 startTime: 2,604 duration: 0,605 entryType: "mark"606 }]);607 });608 });609 });...

Full Screen

Full Screen

structjpeg__decompress__struct.js

Source:structjpeg__decompress__struct.js Github

copy

Full Screen

1var structjpeg__decompress__struct =2[3 [ "ac_huff_tbl_ptrs", "structjpeg__decompress__struct.html#aee32db8740389ecb5e83255bd7f27ca9", null ],4 [ "actual_number_of_colors", "structjpeg__decompress__struct.html#a769a3ea56a6116a2693903042d57bb58", null ],5 [ "Adobe_transform", "structjpeg__decompress__struct.html#af88d572abdd8bdf8f53dc979a870bbbd", null ],6 [ "Ah", "structjpeg__decompress__struct.html#a0ce329c8f2fc5df5afd401632623c4a5", null ],7 [ "Al", "structjpeg__decompress__struct.html#a1e16717d692de3473ea3bb49a75fbb60", null ],8 [ "arith_ac_K", "structjpeg__decompress__struct.html#ab20a02c95fb17c1ccf3adf3027489890", null ],9 [ "arith_code", "structjpeg__decompress__struct.html#a50af15b7b855a6e14f630ccefc0432bf", null ],10 [ "arith_dc_L", "structjpeg__decompress__struct.html#a8d26a9f37579758d24616cda2c8b4b05", null ],11 [ "arith_dc_U", "structjpeg__decompress__struct.html#a76c4942c3130cf8481d55c54191ef9f7", null ],12 [ "blocks_in_MCU", "structjpeg__decompress__struct.html#a9a566432e939703e277a38a9cc9c60f4", null ],13 [ "buffered_image", "structjpeg__decompress__struct.html#afb8d5ca8f876a160dec38f4f7b5f5602", null ],14 [ "CCIR601_sampling", "structjpeg__decompress__struct.html#afbcfd41eca673f6b4dd9f4e639e69e64", null ],15 [ "cconvert", "structjpeg__decompress__struct.html#a81eeb61c48dc0cdfaed9ce13e3553784", null ],16 [ "coef", "structjpeg__decompress__struct.html#a781821e000a702d75e9386ae4bc3a839", null ],17 [ "coef_bits", "structjpeg__decompress__struct.html#afdc08cc41b462e5c864a07ca9ad46a04", null ],18 [ "colormap", "structjpeg__decompress__struct.html#a22e32b86f937aca66099aaf1980d6cc7", null ],19 [ "comp_info", "structjpeg__decompress__struct.html#a2cdbc9dbfcf1e9a6e794822c20c067cb", null ],20 [ "comps_in_scan", "structjpeg__decompress__struct.html#a4ff3bda3835712b0ce0342e9746cd2ff", null ],21 [ "cquantize", "structjpeg__decompress__struct.html#a7aebf053e5854ce75860599d40d5f952", null ],22 [ "cur_comp_info", 
"structjpeg__decompress__struct.html#aa2f0bf5dff97a2676995b34140793825", null ],23 [ "data_precision", "structjpeg__decompress__struct.html#a134188f90a3332d64fc2bcf8af89a12b", null ],24 [ "dc_huff_tbl_ptrs", "structjpeg__decompress__struct.html#ad70d88daaa252af39cddd28d91d5e72b", null ],25 [ "dct_method", "structjpeg__decompress__struct.html#a04032322a4221d1f6b107b45956c24aa", null ],26 [ "density_unit", "structjpeg__decompress__struct.html#a1a005c4c28a36ee498874bef709fd087", null ],27 [ "desired_number_of_colors", "structjpeg__decompress__struct.html#a869a433a978b8716105ba49849573589", null ],28 [ "dither_mode", "structjpeg__decompress__struct.html#a0822d65e976ce618d77648c40fd84ac9", null ],29 [ "do_block_smoothing", "structjpeg__decompress__struct.html#a7153e52cb0b9e6e81fe033c9af6ee263", null ],30 [ "do_fancy_upsampling", "structjpeg__decompress__struct.html#aa078009d4f78f4d068167c07d7933608", null ],31 [ "enable_1pass_quant", "structjpeg__decompress__struct.html#ab935830312bceaa6fa83211c0c1dc93d", null ],32 [ "enable_2pass_quant", "structjpeg__decompress__struct.html#a3c95b2d819627321393fc52892c73eb3", null ],33 [ "enable_external_quant", "structjpeg__decompress__struct.html#a9264dba03d14b45b66f1130e0304db40", null ],34 [ "entropy", "structjpeg__decompress__struct.html#aa87e19a2bc0afcaebd4e968d3f085be1", null ],35 [ "idct", "structjpeg__decompress__struct.html#af1ff707a673473cf44828a5296621e0a", null ],36 [ "image_height", "structjpeg__decompress__struct.html#a24f6e6fcab5d2e9fc0228419c181a657", null ],37 [ "image_width", "structjpeg__decompress__struct.html#abb4a6d3633e5b412d8da7030e62362d1", null ],38 [ "input_iMCU_row", "structjpeg__decompress__struct.html#afd263bfe59d3a1f21fa82e80ab7713ee", null ],39 [ "input_scan_number", "structjpeg__decompress__struct.html#a599e90cc8d2d0f21a08329690de9b5f7", null ],40 [ "inputctl", "structjpeg__decompress__struct.html#a44a883ed25418688f9c949d49135e24e", null ],41 [ "JFIF_major_version", 
"structjpeg__decompress__struct.html#a8ba5a1fa0aa90a44c1c4f803d58876d8", null ],42 [ "JFIF_minor_version", "structjpeg__decompress__struct.html#ade1156bcf6812b120b8a6f049f87cb39", null ],43 [ "jpeg_color_space", "structjpeg__decompress__struct.html#ad1db731be93151e7b43d453557857ce9", null ],44 [ "jpeg_common_fields", "structjpeg__decompress__struct.html#a9be75f402698aedbde0c0e499328af59", null ],45 [ "main", "structjpeg__decompress__struct.html#a8ead718cddb1f580bc3a74be0ef050d8", null ],46 [ "marker", "structjpeg__decompress__struct.html#a915fc1955a2bb6e47067d9da2ad64097", null ],47 [ "marker_list", "structjpeg__decompress__struct.html#a2c7597424695e557b52c666f9e57e941", null ],48 [ "master", "structjpeg__decompress__struct.html#acd866461afacb65a1098045c4ab3a262", null ],49 [ "max_h_samp_factor", "structjpeg__decompress__struct.html#afc5bee7fabe4ce4077cc9b5cfeed293e", null ],50 [ "max_v_samp_factor", "structjpeg__decompress__struct.html#a68f2fb927f484d230445e582554ea681", null ],51 [ "MCU_membership", "structjpeg__decompress__struct.html#a9f56ae7904b4d1ce61c370449b421fda", null ],52 [ "MCU_rows_in_scan", "structjpeg__decompress__struct.html#ad9d524904177bde3e1e9c515af882552", null ],53 [ "MCUs_per_row", "structjpeg__decompress__struct.html#a4bf94c89145c48b582a27eb183d96f9d", null ],54 [ "min_DCT_scaled_size", "structjpeg__decompress__struct.html#ae66338d3bb577f4f686492a426d74b79", null ],55 [ "num_components", "structjpeg__decompress__struct.html#ae111615d0ff14a34b42c364f436aed9f", null ],56 [ "out_color_components", "structjpeg__decompress__struct.html#a244ecc4a01b098da08741a0dbe7ae430", null ],57 [ "out_color_space", "structjpeg__decompress__struct.html#aba615b1e70ae7d615d2038dc2422db8a", null ],58 [ "output_components", "structjpeg__decompress__struct.html#abb4f77fd529ada61f98df7f4d6667a46", null ],59 [ "output_gamma", "structjpeg__decompress__struct.html#ac74ca6db1e7f21529b8251033de02749", null ],60 [ "output_height", 
"structjpeg__decompress__struct.html#a7864bdc3d44a6a838fc0b8e0e3caf75f", null ],61 [ "output_iMCU_row", "structjpeg__decompress__struct.html#ae6adbb167857080cd9fc3b59b8f0ae26", null ],62 [ "output_scan_number", "structjpeg__decompress__struct.html#a0e070266d4d354c414b910274436b3ce", null ],63 [ "output_scanline", "structjpeg__decompress__struct.html#a98ce42ac450f1e380f4682fd28c5587e", null ],64 [ "output_width", "structjpeg__decompress__struct.html#af80367ef53fbe5c2cc7bb99cba611a0d", null ],65 [ "post", "structjpeg__decompress__struct.html#a393a8281ab8488e71c1e1cb43eda005d", null ],66 [ "progressive_mode", "structjpeg__decompress__struct.html#adb1267e699e9dc70e6056c85c20a986b", null ],67 [ "quant_tbl_ptrs", "structjpeg__decompress__struct.html#a99349f969e6ef0199d73b77005f1c7ac", null ],68 [ "quantize_colors", "structjpeg__decompress__struct.html#afe9acf836838f5bcee06036a3f9ef354", null ],69 [ "raw_data_out", "structjpeg__decompress__struct.html#a6027f3445dfbddfe63e5fe4b9e79c49d", null ],70 [ "rec_outbuf_height", "structjpeg__decompress__struct.html#a2895971e23bdfa87f27ce6ce5ffa50be", null ],71 [ "restart_interval", "structjpeg__decompress__struct.html#ac3d5e0468b572f7bb478c507a56f5c58", null ],72 [ "sample_range_limit", "structjpeg__decompress__struct.html#ad492f1fd0fb0e1ce9a0f9f98cf8974fc", null ],73 [ "saw_Adobe_marker", "structjpeg__decompress__struct.html#a1c3eb3e5b0fcd92e6b2cb3df88395bf6", null ],74 [ "saw_JFIF_marker", "structjpeg__decompress__struct.html#a3d03ad9dcffd3b2de43f4254f4a2941c", null ],75 [ "scale_denom", "structjpeg__decompress__struct.html#a5cddd5b83366e7068f6f5ebfb30b511d", null ],76 [ "scale_num", "structjpeg__decompress__struct.html#a584f789ebc86969b0543a898d996ac64", null ],77 [ "Se", "structjpeg__decompress__struct.html#aa48ad5a8248eacc3a1e32f83d8e042ad", null ],78 [ "src", "structjpeg__decompress__struct.html#afc6817e59959e5c26b51b03a2121d452", null ],79 [ "Ss", "structjpeg__decompress__struct.html#ab260a491c6fc19739138894ab7deff0d", null 
],80 [ "total_iMCU_rows", "structjpeg__decompress__struct.html#a042629051be990c43febe548459e731b", null ],81 [ "two_pass_quantize", "structjpeg__decompress__struct.html#a74d3f33836f0b064ee64473e55a5cf2d", null ],82 [ "unread_marker", "structjpeg__decompress__struct.html#a5f2f9ceff6b1c0879405c7ce61ca2997", null ],83 [ "upsample", "structjpeg__decompress__struct.html#a8a687dd4f2d80c79b287e16bea2d962f", null ],84 [ "X_density", "structjpeg__decompress__struct.html#a7fb85c4d44e846086a7810dc1e18bdfe", null ],85 [ "Y_density", "structjpeg__decompress__struct.html#a314355dec0467165b91c19be141a9a43", null ]...

Full Screen

Full Screen

index.js

Source:index.js Github

copy

Full Screen

1'use strict';2var bufferToVinyl = require('buffer-to-vinyl');3var concatStream = require('concat-stream');4var streamCombiner = require('stream-combiner2');5var vinylFs = require('vinyl-fs');6var vinylAssign = require('vinyl-assign');7/**8 * Initialize Decompress9 *10 * @param {Object} opts11 * @api public12 */13function Decompress(opts) {14 if (!(this instanceof Decompress)) {15 return new Decompress(opts);16 }17 this.opts = opts || {};18 this.streams = [];19}20/**21 * Get or set the source files22 *23 * @param {Array|Buffer|String} file24 * @api public25 */26Decompress.prototype.src = function (file) {27 if (!arguments.length) {28 return this._src;29 }30 this._src = file;31 return this;32};33/**34 * Get or set the destination folder35 *36 * @param {String} dir37 * @api public38 */39Decompress.prototype.dest = function (dir) {40 if (!arguments.length) {41 return this._dest;42 }43 this._dest = dir;44 return this;45};46/**47 * Add a plugin to the middleware stack48 *49 * @param {Function} plugin50 * @api public51 */52Decompress.prototype.use = function (plugin) {53 this.streams.push(plugin);54 return this;55};56/**57 * Decompress archive58 *59 * @param {Function} cb60 * @api public61 */62Decompress.prototype.run = function (cb) {63 cb = cb || function () {};64 var stream = this.createStream();65 stream.on('error', cb);66 stream.pipe(concatStream(cb.bind(null, null)));67};68/**69 * Create stream70 *71 * @api private72 */73Decompress.prototype.createStream = function () {74 this.streams.unshift(vinylAssign({extract: true}));75 this.streams.unshift(this.getFiles());76 if (this.streams.length === 2) {77 this.use(Decompress.tar(this.opts));78 this.use(Decompress.tarbz2(this.opts));79 this.use(Decompress.targz(this.opts));80 this.use(Decompress.zip(this.opts));81 }82 if (this.dest()) {83 this.streams.push(vinylFs.dest(this.dest()));84 }85 return streamCombiner.obj(this.streams);86};87/**88 * Get files89 *90 * @api private91 */92Decompress.prototype.getFiles = function () 
{93 if (Buffer.isBuffer(this.src())) {94 return bufferToVinyl.stream(this.src());95 }96 return vinylFs.src(this.src());97};98/**99 * Module exports100 */101module.exports = Decompress;102module.exports.tar = require('decompress-tar');103module.exports.tarbz2 = require('decompress-tarbz2');104module.exports.targz = require('decompress-targz');...

Full Screen

Full Screen

Using AI Code Generation

copy

Full Screen

// Scraped example reformatted: sequential wptools extraction steps over an
// English Wikipedia dump (the digits fused into the original text were the
// snippet's own line numbers, not code).
// NOTE(review): assumes wptools exposes decompress/pageviews/links/categories/
// redirects/externallinks with a (src, dest, callback) signature — confirm
// against the installed wptools version before use.
var wptools = require('wptools');

// Decompress the bzip2-compressed dump to plain XML.
wptools.decompress('enwiki-latest-pages-articles.xml.bz2', 'enwiki-latest-pages-articles.xml', function(err) {
    if (err) {
        console.log(err);
        return; // bug fix: the original fell through and reported 'Done' after an error
    }
    console.log('Done');
});

// Extract page-view statistics from the decompressed dump.
wptools.pageviews('enwiki-latest-pages-articles.xml', 'pageviews.txt', function(err) {
    if (err) {
        console.log(err);
        return;
    }
    console.log('Done');
});

// Extract internal wiki links.
wptools.links('enwiki-latest-pages-articles.xml', 'links.txt', function(err) {
    if (err) {
        console.log(err);
        return;
    }
    console.log('Done');
});

// Extract category membership.
wptools.categories('enwiki-latest-pages-articles.xml', 'categories.txt', function(err) {
    if (err) {
        console.log(err);
        return;
    }
    console.log('Done');
});

// Extract redirect pages.
wptools.redirects('enwiki-latest-pages-articles.xml', 'redirects.txt', function(err) {
    if (err) {
        console.log(err);
        return;
    }
    console.log('Done');
});

// Extract external links.
wptools.externallinks('enwiki-latest-pages-articles.xml', 'externallinks.txt', function(err) {
    if (err) {
        console.log(err);
        return;
    }
    console.log('Done');
});

Full Screen

Using AI Code Generation

copy

Full Screen

// NOTE(review): this scraped snippet is corrupt — the call that opened the
// visible callback (presumably something like wptools.page(<url>).get(...))
// was dropped during scraping, leaving an orphaned `if (err) ... else
// console.log(page)` body and an unmatched `});`. The digits fused into the
// text are the snippet's original line numbers. Do not use verbatim.
1var wptools = require('wptools');2 if (err) {3 console.log(err);4 } else {5 console.log(page);6 }7});

Full Screen

Using AI Code Generation

copy

Full Screen

// Scraped example reformatted: decompress 'test.txt' into 'test2.txt' with the
// wptools decompress helper, then either report the error or confirm success.
// (The digits fused into the original text were the snippet's line numbers.)
var wptools = require('wptools');

wptools.decompress('test.txt', 'test2.txt', function(decompressErr) {
    if (!decompressErr) {
        console.log('Decompressed!');
        return;
    }
    console.log(decompressErr);
});

Full Screen

Using AI Code Generation

copy

Full Screen

// Scraped example reformatted: fetch the 'Barack Obama' page via wptools and
// stream its decompressed content into a local file. (The digits fused into
// the original text were the snippet's line numbers.)
// NOTE(review): assumes page.decompress() returns a readable stream — confirm
// against the installed wptools version.
var wptools = require('wptools');
var fs = require('fs');

var file = fs.createWriteStream('test.txt');
var wp = wptools.page('Barack Obama');

wp.get(function(err, page) {
    // bug fix: the original ignored `err`, so a failed fetch would crash on
    // `page.decompress()` ("Cannot read property 'decompress' of undefined").
    if (err) {
        console.log(err);
        return;
    }
    page.decompress().pipe(file);
});

Full Screen

Using AI Code Generation

copy

Full Screen

// Scraped example reformatted: call wptools.decompress with an empty options
// object and log either the error or the decompressed payload. (The digits
// fused into the original text were the snippet's line numbers.)
// NOTE(review): `fs` is required but never used in this snippet — kept so the
// module-load side effects match the original exactly.
var wptools = require('wptools');
var fs = require('fs');

wptools.decompress({}, function(error, payload) {
    if (!error) {
        console.log('Decompressed data: ' + payload);
    } else {
        console.log('Error: ' + error);
    }
});

Full Screen

Automation Testing Tutorials

Learn to execute automation testing from scratch with LambdaTest Learning Hub. Right from setting up the prerequisites to running your first automation test, to following best practices and diving deeper into advanced test scenarios, LambdaTest Learning Hubs compile a list of step-by-step guides to help you become proficient with different test automation frameworks, e.g. Selenium, Cypress, TestNG, etc.

LambdaTest Learning Hubs:

YouTube

You could also refer to the video tutorials on the LambdaTest YouTube channel to get step-by-step demonstrations from industry experts.

Run wpt automation tests on LambdaTest cloud grid

Perform automation testing on 3000+ real desktop and mobile devices online.

Try LambdaTest Now !!

Get 100 minutes of automation test minutes FREE!!

Next-Gen App & Browser Testing Cloud

Was this article helpful?

Helpful

Not Helpful