comparison contrib/python-zstandard/zstd/compress/zstd_lazy.h @ 40121:73fef626dae3

zstandard: vendor python-zstandard 0.10.1

This was just released.

The upstream source distribution from PyPI was extracted. Unwanted files
were removed.

The clang-format ignore list was updated to reflect the new source of files.

setup.py was updated to pass a new argument to python-zstandard's function
for returning an Extension instance. Upstream had to change to use relative
paths because Python 3.7's packaging doesn't seem to like absolute paths
when defining sources, includes, etc. The default relative path calculation
is relative to setup_zstd.py, which is different from the directory of
Mercurial's setup.py.

The project contains a vendored copy of zstandard 1.3.6. The old version
was 1.3.4.

The API should be backwards compatible and nothing in core should need to
be adjusted. However, there is a new "chunker" API that we may find useful
in places where we want to emit compressed chunks of a fixed size.

There are a pair of bug fixes in 0.10.0 with regard to compressobj() and
decompressobj() when block flushing is used. I actually found these bugs
when introducing these APIs in Mercurial! But existing Mercurial code is
not affected because we don't perform block flushing.

# no-check-commit because 3rd party code has different style guidelines

Differential Revision: https://phab.mercurial-scm.org/D4911
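The description above mentions two things worth illustrating: the new "chunker" API for emitting compressed output in fixed-size chunks, and the compressobj()/decompressobj() block-flushing paths that received bug fixes in 0.10.0. Below is a minimal, illustrative sketch of how those APIs can be exercised. It is not part of this changeset; the names used (chunker(), chunk_size, COMPRESSOBJ_FLUSH_BLOCK) are taken from the python-zstandard 0.10 API and should be double-checked against its documentation rather than read as a definitive reference.

# Illustrative sketch only; not part of this changeset.
import zstandard as zstd

data = b"some data to compress" * 1024

# Chunker API: emit compressed output in fixed-size chunks.
cctx = zstd.ZstdCompressor(level=3)
chunker = cctx.chunker(chunk_size=16384)

out = []
for offset in range(0, len(data), 8192):
    # compress() buffers internally and yields only full 16384-byte chunks.
    out.extend(chunker.compress(data[offset:offset + 8192]))
# finish() flushes any remaining partial chunk and ends the zstd frame.
out.extend(chunker.finish())
compressed = b"".join(out)

# compressobj()/decompressobj() with block flushing, the code path fixed
# in 0.10.0. (Mercurial itself does not perform block flushing.)
cobj = zstd.ZstdCompressor().compressobj()
frame = cobj.compress(data)
frame += cobj.flush(zstd.COMPRESSOBJ_FLUSH_BLOCK)  # flush a block mid-stream
frame += cobj.flush()                              # finish the frame

dobj = zstd.ZstdDecompressor().decompressobj()
assert dobj.decompress(frame) == data

As the description notes, nothing in Mercurial core depends on these APIs yet; the sketch only shows what the newly vendored version provides.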
author Gregory Szorc <gregory.szorc@gmail.com>
date Mon, 08 Oct 2018 16:27:40 -0700
parents b1fb341d8a61
children 69de49c4e39c
comparison of 40120:89742f1fa6cb with 40121:73fef626dae3
15 extern "C" { 15 extern "C" {
16 #endif 16 #endif
17 17
18 #include "zstd_compress_internal.h" 18 #include "zstd_compress_internal.h"
19 19
20 U32 ZSTD_insertAndFindFirstIndex( 20 U32 ZSTD_insertAndFindFirstIndex(ZSTD_matchState_t* ms, const BYTE* ip);
21 ZSTD_matchState_t* ms, ZSTD_compressionParameters const* cParams,
22 const BYTE* ip);
23 21
24 void ZSTD_preserveUnsortedMark (U32* const table, U32 const size, U32 const reducerValue); /*! used in ZSTD_reduceIndex(). pre-emptively increase value of ZSTD_DUBT_UNSORTED_MARK */ 22 void ZSTD_preserveUnsortedMark (U32* const table, U32 const size, U32 const reducerValue); /*! used in ZSTD_reduceIndex(). pre-emptively increase value of ZSTD_DUBT_UNSORTED_MARK */
25 23
26 size_t ZSTD_compressBlock_btlazy2( 24 size_t ZSTD_compressBlock_btlazy2(
27 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], 25 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
28 ZSTD_compressionParameters const* cParams, void const* src, size_t srcSize); 26 void const* src, size_t srcSize);
29 size_t ZSTD_compressBlock_lazy2( 27 size_t ZSTD_compressBlock_lazy2(
30 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], 28 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
31 ZSTD_compressionParameters const* cParams, void const* src, size_t srcSize); 29 void const* src, size_t srcSize);
32 size_t ZSTD_compressBlock_lazy( 30 size_t ZSTD_compressBlock_lazy(
33 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], 31 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
34 ZSTD_compressionParameters const* cParams, void const* src, size_t srcSize); 32 void const* src, size_t srcSize);
35 size_t ZSTD_compressBlock_greedy( 33 size_t ZSTD_compressBlock_greedy(
36 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], 34 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
37 ZSTD_compressionParameters const* cParams, void const* src, size_t srcSize); 35 void const* src, size_t srcSize);
36
37 size_t ZSTD_compressBlock_btlazy2_dictMatchState(
38 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
39 void const* src, size_t srcSize);
40 size_t ZSTD_compressBlock_lazy2_dictMatchState(
41 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
42 void const* src, size_t srcSize);
43 size_t ZSTD_compressBlock_lazy_dictMatchState(
44 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
45 void const* src, size_t srcSize);
46 size_t ZSTD_compressBlock_greedy_dictMatchState(
47 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
48 void const* src, size_t srcSize);
38 49
39 size_t ZSTD_compressBlock_greedy_extDict( 50 size_t ZSTD_compressBlock_greedy_extDict(
40 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], 51 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
41 ZSTD_compressionParameters const* cParams, void const* src, size_t srcSize); 52 void const* src, size_t srcSize);
42 size_t ZSTD_compressBlock_lazy_extDict( 53 size_t ZSTD_compressBlock_lazy_extDict(
43 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], 54 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
44 ZSTD_compressionParameters const* cParams, void const* src, size_t srcSize); 55 void const* src, size_t srcSize);
45 size_t ZSTD_compressBlock_lazy2_extDict( 56 size_t ZSTD_compressBlock_lazy2_extDict(
46 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], 57 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
47 ZSTD_compressionParameters const* cParams, void const* src, size_t srcSize); 58 void const* src, size_t srcSize);
48 size_t ZSTD_compressBlock_btlazy2_extDict( 59 size_t ZSTD_compressBlock_btlazy2_extDict(
49 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], 60 ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
50 ZSTD_compressionParameters const* cParams, void const* src, size_t srcSize); 61 void const* src, size_t srcSize);
51 62
52 #if defined (__cplusplus) 63 #if defined (__cplusplus)
53 } 64 }
54 #endif 65 #endif
55 66