From: Yann Collet Date: Tue, 10 Dec 2024 23:35:11 +0000 (-0800) Subject: codemod: ZSTD_matchState_t -> ZSTD_MatchState_t X-Git-Tag: v1.5.7^2~48^2~51 X-Git-Url: http://git.ipfire.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=5df80acedb6b352e949ef24446cb489b15524ce0;p=thirdparty%2Fzstd.git codemod: ZSTD_matchState_t -> ZSTD_MatchState_t --- diff --git a/lib/compress/zstd_compress.c b/lib/compress/zstd_compress.c index e068b48bd..029928fb2 100644 --- a/lib/compress/zstd_compress.c +++ b/lib/compress/zstd_compress.c @@ -48,7 +48,7 @@ * in log format, aka 17 => 1 << 17 == 128Ki positions. * This structure is only used in zstd_opt. * Since allocation is centralized for all strategies, it has to be known here. - * The actual (selected) size of the hash table is then stored in ZSTD_matchState_t.hashLog3, + * The actual (selected) size of the hash table is then stored in ZSTD_MatchState_t.hashLog3, * so that zstd_opt.c doesn't need to know about this constant. */ #ifndef ZSTD_HASHLOG3_MAX @@ -82,7 +82,7 @@ struct ZSTD_CDict_s { ZSTD_dictContentType_e dictContentType; /* The dictContentType the CDict was created with */ U32* entropyWorkspace; /* entropy workspace of HUF_WORKSPACE_SIZE bytes */ ZSTD_cwksp workspace; - ZSTD_matchState_t matchState; + ZSTD_MatchState_t matchState; ZSTD_compressedBlockState_t cBlockState; ZSTD_customMem customMem; U32 dictID; @@ -1938,7 +1938,7 @@ void ZSTD_reset_compressedBlockState(ZSTD_compressedBlockState_t* bs) * Invalidate all the matches in the match finder tables. * Requires nextSrc and base to be set (can be NULL). */ -static void ZSTD_invalidateMatchState(ZSTD_matchState_t* ms) +static void ZSTD_invalidateMatchState(ZSTD_MatchState_t* ms) { ZSTD_window_clear(&ms->window); @@ -1985,12 +1985,12 @@ static U64 ZSTD_bitmix(U64 val, U64 len) { } /* Mixes in the hashSalt and hashSaltEntropy to create a new hashSalt */ -static void ZSTD_advanceHashSalt(ZSTD_matchState_t* ms) { +static void ZSTD_advanceHashSalt(ZSTD_MatchState_t* ms) { ms->hashSalt = ZSTD_bitmix(ms->hashSalt, 8) ^ ZSTD_bitmix((U64) ms->hashSaltEntropy, 4); } static size_t -ZSTD_reset_matchState(ZSTD_matchState_t* ms, +ZSTD_reset_matchState(ZSTD_MatchState_t* ms, ZSTD_cwksp* ws, const ZSTD_compressionParameters* cParams, const ZSTD_paramSwitch_e useRowMatchFinder, @@ -2473,8 +2473,8 @@ static size_t ZSTD_resetCCtx_byCopyingCDict(ZSTD_CCtx* cctx, ZSTD_cwksp_mark_tables_clean(&cctx->workspace); /* copy dictionary offsets */ - { ZSTD_matchState_t const* srcMatchState = &cdict->matchState; - ZSTD_matchState_t* dstMatchState = &cctx->blockState.matchState; + { ZSTD_MatchState_t const* srcMatchState = &cdict->matchState; + ZSTD_MatchState_t* dstMatchState = &cctx->blockState.matchState; dstMatchState->window = srcMatchState->window; dstMatchState->nextToUpdate = srcMatchState->nextToUpdate; dstMatchState->loadedDictEnd= srcMatchState->loadedDictEnd; @@ -2576,8 +2576,8 @@ static size_t ZSTD_copyCCtx_internal(ZSTD_CCtx* dstCCtx, /* copy dictionary offsets */ { - const ZSTD_matchState_t* srcMatchState = &srcCCtx->blockState.matchState; - ZSTD_matchState_t* dstMatchState = &dstCCtx->blockState.matchState; + const ZSTD_MatchState_t* srcMatchState = &srcCCtx->blockState.matchState; + ZSTD_MatchState_t* dstMatchState = &dstCCtx->blockState.matchState; dstMatchState->window = srcMatchState->window; dstMatchState->nextToUpdate = srcMatchState->nextToUpdate; dstMatchState->loadedDictEnd= srcMatchState->loadedDictEnd; @@ -2671,7 +2671,7 @@ static void ZSTD_reduceTable_btlazy2(U32* const table, U32 const size, U32 const 
/*! ZSTD_reduceIndex() : * rescale all indexes to avoid future overflow (indexes are U32) */ -static void ZSTD_reduceIndex (ZSTD_matchState_t* ms, ZSTD_CCtx_params const* params, const U32 reducerValue) +static void ZSTD_reduceIndex (ZSTD_MatchState_t* ms, ZSTD_CCtx_params const* params, const U32 reducerValue) { { U32 const hSize = (U32)1 << params->cParams.hashLog; ZSTD_reduceTable(ms->hashTable, hSize, reducerValue); @@ -3246,17 +3246,17 @@ typedef struct { } ZSTD_SequencePosition; static size_t -ZSTD_copySequencesToSeqStoreExplicitBlockDelim(ZSTD_CCtx* cctx, - ZSTD_SequencePosition* seqPos, - const ZSTD_Sequence* const inSeqs, size_t inSeqsSize, - const void* src, size_t blockSize, - ZSTD_paramSwitch_e externalRepSearch); +ZSTD_transferSequences_wBlockDelim(ZSTD_CCtx* cctx, + ZSTD_SequencePosition* seqPos, + const ZSTD_Sequence* const inSeqs, size_t inSeqsSize, + const void* src, size_t blockSize, + ZSTD_paramSwitch_e externalRepSearch); typedef enum { ZSTDbss_compress, ZSTDbss_noCompress } ZSTD_BuildSeqStore_e; static size_t ZSTD_buildSeqStore(ZSTD_CCtx* zc, const void* src, size_t srcSize) { - ZSTD_matchState_t* const ms = &zc->blockState.matchState; + ZSTD_MatchState_t* const ms = &zc->blockState.matchState; DEBUGLOG(5, "ZSTD_buildSeqStore (srcSize=%zu)", srcSize); assert(srcSize <= ZSTD_BLOCKSIZE_MAX); /* Assert that we have correctly flushed the ctx params into the ms's copy */ @@ -3372,7 +3372,7 @@ static size_t ZSTD_buildSeqStore(ZSTD_CCtx* zc, const void* src, size_t srcSize) size_t const seqLenSum = ZSTD_fastSequenceLengthSum(zc->extSeqBuf, nbPostProcessedSeqs); RETURN_ERROR_IF(seqLenSum > srcSize, externalSequences_invalid, "External sequences imply too large a block!"); FORWARD_IF_ERROR( - ZSTD_copySequencesToSeqStoreExplicitBlockDelim( + ZSTD_transferSequences_wBlockDelim( zc, &seqPos, zc->extSeqBuf, nbPostProcessedSeqs, src, srcSize, @@ -4515,7 +4515,7 @@ static size_t ZSTD_compressBlock_targetCBlockSize(ZSTD_CCtx* zc, return cSize; } -static void ZSTD_overflowCorrectIfNeeded(ZSTD_matchState_t* ms, +static void ZSTD_overflowCorrectIfNeeded(ZSTD_MatchState_t* ms, ZSTD_cwksp* ws, ZSTD_CCtx_params const* params, void const* ip, @@ -4600,7 +4600,7 @@ static size_t ZSTD_compress_frameChunk(ZSTD_CCtx* cctx, XXH64_update(&cctx->xxhState, src, srcSize); while (remaining) { - ZSTD_matchState_t* const ms = &cctx->blockState.matchState; + ZSTD_MatchState_t* const ms = &cctx->blockState.matchState; size_t const blockSize = ZSTD_optimalBlockSize(cctx, ip, remaining, blockSizeMax, @@ -4784,7 +4784,7 @@ static size_t ZSTD_compressContinue_internal (ZSTD_CCtx* cctx, const void* src, size_t srcSize, U32 frame, U32 lastFrameChunk) { - ZSTD_matchState_t* const ms = &cctx->blockState.matchState; + ZSTD_MatchState_t* const ms = &cctx->blockState.matchState; size_t fhSize = 0; DEBUGLOG(5, "ZSTD_compressContinue_internal, stage: %u, srcSize: %u", @@ -4888,7 +4888,7 @@ size_t ZSTD_compressBlock(ZSTD_CCtx* cctx, void* dst, size_t dstCapacity, const /*! 
ZSTD_loadDictionaryContent() : * @return : 0, or an error code */ -static size_t ZSTD_loadDictionaryContent(ZSTD_matchState_t* ms, +static size_t ZSTD_loadDictionaryContent(ZSTD_MatchState_t* ms, ldmState_t* ls, ZSTD_cwksp* ws, ZSTD_CCtx_params const* params, @@ -5146,7 +5146,7 @@ size_t ZSTD_loadCEntropy(ZSTD_compressedBlockState_t* bs, void* workspace, * dictSize supposed >= 8 */ static size_t ZSTD_loadZstdDictionary(ZSTD_compressedBlockState_t* bs, - ZSTD_matchState_t* ms, + ZSTD_MatchState_t* ms, ZSTD_cwksp* ws, ZSTD_CCtx_params const* params, const void* dict, size_t dictSize, @@ -5179,7 +5179,7 @@ static size_t ZSTD_loadZstdDictionary(ZSTD_compressedBlockState_t* bs, * @return : dictID, or an error code */ static size_t ZSTD_compress_insertDictionary(ZSTD_compressedBlockState_t* bs, - ZSTD_matchState_t* ms, + ZSTD_MatchState_t* ms, ldmState_t* ls, ZSTD_cwksp* ws, const ZSTD_CCtx_params* params, @@ -6621,7 +6621,7 @@ static U32 ZSTD_finalizeOffBase(U32 rawOffset, const U32 rep[ZSTD_REP_NUM], U32 * @returns 0 on success, and a ZSTD_error otherwise. */ static size_t -ZSTD_copySequencesToSeqStoreExplicitBlockDelim(ZSTD_CCtx* cctx, +ZSTD_transferSequences_wBlockDelim(ZSTD_CCtx* cctx, ZSTD_SequencePosition* seqPos, const ZSTD_Sequence* const inSeqs, size_t inSeqsSize, const void* src, size_t blockSize, @@ -6634,7 +6634,7 @@ ZSTD_copySequencesToSeqStoreExplicitBlockDelim(ZSTD_CCtx* cctx, repcodes_t updatedRepcodes; U32 dictSize; - DEBUGLOG(5, "ZSTD_copySequencesToSeqStoreExplicitBlockDelim (blockSize = %zu)", blockSize); + DEBUGLOG(5, "ZSTD_transferSequences_wBlockDelim (blockSize = %zu)", blockSize); if (cctx->cdict) { dictSize = (U32)cctx->cdict->dictContentSize; @@ -6852,7 +6852,7 @@ static ZSTD_SequenceCopier_f ZSTD_selectSequenceCopier(ZSTD_SequenceFormat_e mod { assert(ZSTD_cParam_withinBounds(ZSTD_c_blockDelimiters, (int)mode)); if (mode == ZSTD_sf_explicitBlockDelimiters) { - return ZSTD_copySequencesToSeqStoreExplicitBlockDelim; + return ZSTD_transferSequences_wBlockDelim; } assert(mode == ZSTD_sf_noBlockDelimiters); return ZSTD_copySequencesToSeqStoreNoBlockDelim; diff --git a/lib/compress/zstd_compress_internal.h b/lib/compress/zstd_compress_internal.h index d3aa1e234..91066a4be 100644 --- a/lib/compress/zstd_compress_internal.h +++ b/lib/compress/zstd_compress_internal.h @@ -269,11 +269,11 @@ typedef struct { #define ZSTD_WINDOW_START_INDEX 2 -typedef struct ZSTD_matchState_t ZSTD_matchState_t; +typedef struct ZSTD_MatchState_t ZSTD_MatchState_t; #define ZSTD_ROW_HASH_CACHE_SIZE 8 /* Size of prefetching hash cache for row-based matchfinder */ -struct ZSTD_matchState_t { +struct ZSTD_MatchState_t { ZSTD_window_t window; /* State for window round buffer management */ U32 loadedDictEnd; /* index of end of dictionary, within context's referential. * When loadedDictEnd != 0, a dictionary is in use, and still valid. @@ -301,7 +301,7 @@ struct ZSTD_matchState_t { * dedicated dictionary search structure. 
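
The hunk above in zstd_compress_internal.h is the heart of the codemod: the type is forward-declared through its struct tag (typedef struct ZSTD_MatchState_t ZSTD_MatchState_t;), so the tag and the typedef name must be renamed together, and every declaration that only sees the forward declaration follows automatically. A minimal standalone sketch of that C pattern, using an invented PointSet type rather than the zstd internals:

    #include <stdio.h>
    #include <stddef.h>

    /* Forward declaration: the typedef name and the struct tag are kept identical,
     * so renaming the type is one coordinated edit (tag + typedef + all uses). */
    typedef struct PointSet PointSet;

    /* A function can take PointSet* before the struct body is visible. */
    static size_t pointSet_capacity(const PointSet* ps);

    struct PointSet {
        size_t capacity;
        size_t count;
    };

    static size_t pointSet_capacity(const PointSet* ps) { return ps->capacity; }

    int main(void)
    {
        PointSet ps = { 16, 0 };
        printf("capacity = %zu\n", pointSet_capacity(&ps));
        return 0;
    }

Since C keeps struct tags and ordinary identifiers in separate name spaces, struct ZSTD_MatchState_t and the typedef ZSTD_MatchState_t are distinct names that merely coincide, which is why the diff has to touch both spellings wherever they occur.
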
*/ optState_t opt; /* optimal parser state */ - const ZSTD_matchState_t* dictMatchState; + const ZSTD_MatchState_t* dictMatchState; ZSTD_compressionParameters cParams; const rawSeqStore_t* ldmSeqStore; @@ -321,7 +321,7 @@ struct ZSTD_matchState_t { typedef struct { ZSTD_compressedBlockState_t* prevCBlock; ZSTD_compressedBlockState_t* nextCBlock; - ZSTD_matchState_t matchState; + ZSTD_MatchState_t matchState; } ZSTD_blockState_t; typedef struct { @@ -580,7 +580,7 @@ typedef enum { } ZSTD_cParamMode_e; typedef size_t (*ZSTD_blockCompressor) ( - ZSTD_matchState_t* bs, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* bs, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize); ZSTD_blockCompressor ZSTD_selectBlockCompressor(ZSTD_strategy strat, ZSTD_paramSwitch_e rowMatchfinderMode, ZSTD_dictMode_e dictMode); @@ -1081,7 +1081,7 @@ MEM_STATIC U32 ZSTD_window_hasExtDict(ZSTD_window_t const window) * Inspects the provided matchState and figures out what dictMode should be * passed to the compressor. */ -MEM_STATIC ZSTD_dictMode_e ZSTD_matchState_dictMode(const ZSTD_matchState_t *ms) +MEM_STATIC ZSTD_dictMode_e ZSTD_matchState_dictMode(const ZSTD_MatchState_t *ms) { return ZSTD_window_hasExtDict(ms->window) ? ZSTD_extDict : @@ -1270,7 +1270,7 @@ ZSTD_window_enforceMaxDist(ZSTD_window_t* window, const void* blockEnd, U32 maxDist, U32* loadedDictEndPtr, - const ZSTD_matchState_t** dictMatchStatePtr) + const ZSTD_MatchState_t** dictMatchStatePtr) { U32 const blockEndIdx = (U32)((BYTE const*)blockEnd - window->base); U32 const loadedDictEnd = (loadedDictEndPtr != NULL) ? *loadedDictEndPtr : 0; @@ -1315,7 +1315,7 @@ ZSTD_checkDictValidity(const ZSTD_window_t* window, const void* blockEnd, U32 maxDist, U32* loadedDictEndPtr, - const ZSTD_matchState_t** dictMatchStatePtr) + const ZSTD_MatchState_t** dictMatchStatePtr) { assert(loadedDictEndPtr != NULL); assert(dictMatchStatePtr != NULL); @@ -1405,7 +1405,7 @@ U32 ZSTD_window_update(ZSTD_window_t* window, /** * Returns the lowest allowed match index. It may either be in the ext-dict or the prefix. */ -MEM_STATIC U32 ZSTD_getLowestMatchIndex(const ZSTD_matchState_t* ms, U32 curr, unsigned windowLog) +MEM_STATIC U32 ZSTD_getLowestMatchIndex(const ZSTD_MatchState_t* ms, U32 curr, unsigned windowLog) { U32 const maxDistance = 1U << windowLog; U32 const lowestValid = ms->window.lowLimit; @@ -1422,7 +1422,7 @@ MEM_STATIC U32 ZSTD_getLowestMatchIndex(const ZSTD_matchState_t* ms, U32 curr, u /** * Returns the lowest allowed match index in the prefix. 
*/ -MEM_STATIC U32 ZSTD_getLowestPrefixIndex(const ZSTD_matchState_t* ms, U32 curr, unsigned windowLog) +MEM_STATIC U32 ZSTD_getLowestPrefixIndex(const ZSTD_MatchState_t* ms, U32 curr, unsigned windowLog) { U32 const maxDistance = 1U << windowLog; U32 const lowestValid = ms->window.dictLimit; diff --git a/lib/compress/zstd_double_fast.c b/lib/compress/zstd_double_fast.c index 1406c2d50..1a266e7d9 100644 --- a/lib/compress/zstd_double_fast.c +++ b/lib/compress/zstd_double_fast.c @@ -15,7 +15,7 @@ static ZSTD_ALLOW_POINTER_OVERFLOW_ATTR -void ZSTD_fillDoubleHashTableForCDict(ZSTD_matchState_t* ms, +void ZSTD_fillDoubleHashTableForCDict(ZSTD_MatchState_t* ms, void const* end, ZSTD_dictTableLoadMethod_e dtlm) { const ZSTD_compressionParameters* const cParams = &ms->cParams; @@ -53,7 +53,7 @@ void ZSTD_fillDoubleHashTableForCDict(ZSTD_matchState_t* ms, static ZSTD_ALLOW_POINTER_OVERFLOW_ATTR -void ZSTD_fillDoubleHashTableForCCtx(ZSTD_matchState_t* ms, +void ZSTD_fillDoubleHashTableForCCtx(ZSTD_MatchState_t* ms, void const* end, ZSTD_dictTableLoadMethod_e dtlm) { const ZSTD_compressionParameters* const cParams = &ms->cParams; @@ -87,7 +87,7 @@ void ZSTD_fillDoubleHashTableForCCtx(ZSTD_matchState_t* ms, } } } -void ZSTD_fillDoubleHashTable(ZSTD_matchState_t* ms, +void ZSTD_fillDoubleHashTable(ZSTD_MatchState_t* ms, const void* const end, ZSTD_dictTableLoadMethod_e dtlm, ZSTD_tableFillPurpose_e tfp) @@ -103,7 +103,7 @@ void ZSTD_fillDoubleHashTable(ZSTD_matchState_t* ms, FORCE_INLINE_TEMPLATE ZSTD_ALLOW_POINTER_OVERFLOW_ATTR size_t ZSTD_compressBlock_doubleFast_noDict_generic( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize, U32 const mls /* template */) { ZSTD_compressionParameters const* cParams = &ms->cParams; @@ -326,7 +326,7 @@ _match_stored: FORCE_INLINE_TEMPLATE ZSTD_ALLOW_POINTER_OVERFLOW_ATTR size_t ZSTD_compressBlock_doubleFast_dictMatchState_generic( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize, U32 const mls /* template */) { @@ -347,7 +347,7 @@ size_t ZSTD_compressBlock_doubleFast_dictMatchState_generic( const BYTE* const ilimit = iend - HASH_READ_SIZE; U32 offset_1=rep[0], offset_2=rep[1]; - const ZSTD_matchState_t* const dms = ms->dictMatchState; + const ZSTD_MatchState_t* const dms = ms->dictMatchState; const ZSTD_compressionParameters* const dictCParams = &dms->cParams; const U32* const dictHashLong = dms->hashTable; const U32* const dictHashSmall = dms->chainTable; @@ -548,7 +548,7 @@ _match_stored: #define ZSTD_GEN_DFAST_FN(dictMode, mls) \ static size_t ZSTD_compressBlock_doubleFast_##dictMode##_##mls( \ - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], \ + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], \ void const* src, size_t srcSize) \ { \ return ZSTD_compressBlock_doubleFast_##dictMode##_generic(ms, seqStore, rep, src, srcSize, mls); \ @@ -566,7 +566,7 @@ ZSTD_GEN_DFAST_FN(dictMatchState, 7) size_t ZSTD_compressBlock_doubleFast( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize) { const U32 mls = ms->cParams.minMatch; @@ -586,7 +586,7 @@ size_t ZSTD_compressBlock_doubleFast( size_t ZSTD_compressBlock_doubleFast_dictMatchState( - ZSTD_matchState_t* ms, SeqStore_t* 
seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize) { const U32 mls = ms->cParams.minMatch; @@ -608,7 +608,7 @@ size_t ZSTD_compressBlock_doubleFast_dictMatchState( static ZSTD_ALLOW_POINTER_OVERFLOW_ATTR size_t ZSTD_compressBlock_doubleFast_extDict_generic( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize, U32 const mls /* template */) { @@ -757,7 +757,7 @@ ZSTD_GEN_DFAST_FN(extDict, 6) ZSTD_GEN_DFAST_FN(extDict, 7) size_t ZSTD_compressBlock_doubleFast_extDict( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize) { U32 const mls = ms->cParams.minMatch; diff --git a/lib/compress/zstd_double_fast.h b/lib/compress/zstd_double_fast.h index 1a746fab9..c58697ad2 100644 --- a/lib/compress/zstd_double_fast.h +++ b/lib/compress/zstd_double_fast.h @@ -20,18 +20,18 @@ extern "C" { #ifndef ZSTD_EXCLUDE_DFAST_BLOCK_COMPRESSOR -void ZSTD_fillDoubleHashTable(ZSTD_matchState_t* ms, +void ZSTD_fillDoubleHashTable(ZSTD_MatchState_t* ms, void const* end, ZSTD_dictTableLoadMethod_e dtlm, ZSTD_tableFillPurpose_e tfp); size_t ZSTD_compressBlock_doubleFast( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize); size_t ZSTD_compressBlock_doubleFast_dictMatchState( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize); size_t ZSTD_compressBlock_doubleFast_extDict( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize); #define ZSTD_COMPRESSBLOCK_DOUBLEFAST ZSTD_compressBlock_doubleFast diff --git a/lib/compress/zstd_fast.c b/lib/compress/zstd_fast.c index 3c8d5b527..ee25bcbac 100644 --- a/lib/compress/zstd_fast.c +++ b/lib/compress/zstd_fast.c @@ -13,7 +13,7 @@ static ZSTD_ALLOW_POINTER_OVERFLOW_ATTR -void ZSTD_fillHashTableForCDict(ZSTD_matchState_t* ms, +void ZSTD_fillHashTableForCDict(ZSTD_MatchState_t* ms, const void* const end, ZSTD_dictTableLoadMethod_e dtlm) { @@ -50,7 +50,7 @@ void ZSTD_fillHashTableForCDict(ZSTD_matchState_t* ms, static ZSTD_ALLOW_POINTER_OVERFLOW_ATTR -void ZSTD_fillHashTableForCCtx(ZSTD_matchState_t* ms, +void ZSTD_fillHashTableForCCtx(ZSTD_MatchState_t* ms, const void* const end, ZSTD_dictTableLoadMethod_e dtlm) { @@ -84,7 +84,7 @@ void ZSTD_fillHashTableForCCtx(ZSTD_matchState_t* ms, } } } } } -void ZSTD_fillHashTable(ZSTD_matchState_t* ms, +void ZSTD_fillHashTable(ZSTD_MatchState_t* ms, const void* const end, ZSTD_dictTableLoadMethod_e dtlm, ZSTD_tableFillPurpose_e tfp) @@ -190,7 +190,7 @@ ZSTD_match4Found_branch(const BYTE* currentPtr, const BYTE* matchAddress, U32 ma FORCE_INLINE_TEMPLATE ZSTD_ALLOW_POINTER_OVERFLOW_ATTR size_t ZSTD_compressBlock_fast_noDict_generic( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize, U32 const mls, int useCmov) { @@ -424,7 +424,7 @@ _match: /* Requires: ip0, match0, offcode */ #define ZSTD_GEN_FAST_FN(dictMode, mml, cmov) \ static size_t 
ZSTD_compressBlock_fast_##dictMode##_##mml##_##cmov( \ - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], \ + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], \ void const* src, size_t srcSize) \ { \ return ZSTD_compressBlock_fast_##dictMode##_generic(ms, seqStore, rep, src, srcSize, mml, cmov); \ @@ -441,7 +441,7 @@ ZSTD_GEN_FAST_FN(noDict, 6, 0) ZSTD_GEN_FAST_FN(noDict, 7, 0) size_t ZSTD_compressBlock_fast( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize) { U32 const mml = ms->cParams.minMatch; @@ -481,7 +481,7 @@ size_t ZSTD_compressBlock_fast( FORCE_INLINE_TEMPLATE ZSTD_ALLOW_POINTER_OVERFLOW_ATTR size_t ZSTD_compressBlock_fast_dictMatchState_generic( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize, U32 const mls, U32 const hasStep) { const ZSTD_compressionParameters* const cParams = &ms->cParams; @@ -500,7 +500,7 @@ size_t ZSTD_compressBlock_fast_dictMatchState_generic( const BYTE* const ilimit = iend - HASH_READ_SIZE; U32 offset_1=rep[0], offset_2=rep[1]; - const ZSTD_matchState_t* const dms = ms->dictMatchState; + const ZSTD_MatchState_t* const dms = ms->dictMatchState; const ZSTD_compressionParameters* const dictCParams = &dms->cParams ; const U32* const dictHashTable = dms->hashTable; const U32 dictStartIndex = dms->window.dictLimit; @@ -684,7 +684,7 @@ ZSTD_GEN_FAST_FN(dictMatchState, 6, 0) ZSTD_GEN_FAST_FN(dictMatchState, 7, 0) size_t ZSTD_compressBlock_fast_dictMatchState( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize) { U32 const mls = ms->cParams.minMatch; @@ -707,7 +707,7 @@ size_t ZSTD_compressBlock_fast_dictMatchState( static ZSTD_ALLOW_POINTER_OVERFLOW_ATTR size_t ZSTD_compressBlock_fast_extDict_generic( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize, U32 const mls, U32 const hasStep) { const ZSTD_compressionParameters* const cParams = &ms->cParams; @@ -965,7 +965,7 @@ ZSTD_GEN_FAST_FN(extDict, 6, 0) ZSTD_GEN_FAST_FN(extDict, 7, 0) size_t ZSTD_compressBlock_fast_extDict( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize) { U32 const mls = ms->cParams.minMatch; diff --git a/lib/compress/zstd_fast.h b/lib/compress/zstd_fast.h index 5a971cf32..f7fbf4f68 100644 --- a/lib/compress/zstd_fast.h +++ b/lib/compress/zstd_fast.h @@ -18,17 +18,17 @@ extern "C" { #include "../common/mem.h" /* U32 */ #include "zstd_compress_internal.h" -void ZSTD_fillHashTable(ZSTD_matchState_t* ms, +void ZSTD_fillHashTable(ZSTD_MatchState_t* ms, void const* end, ZSTD_dictTableLoadMethod_e dtlm, ZSTD_tableFillPurpose_e tfp); size_t ZSTD_compressBlock_fast( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize); size_t ZSTD_compressBlock_fast_dictMatchState( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize); size_t 
ZSTD_compressBlock_fast_extDict( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize); #if defined (__cplusplus) diff --git a/lib/compress/zstd_lazy.c b/lib/compress/zstd_lazy.c index 9fe56c76b..272ebe0ec 100644 --- a/lib/compress/zstd_lazy.c +++ b/lib/compress/zstd_lazy.c @@ -26,7 +26,7 @@ static ZSTD_ALLOW_POINTER_OVERFLOW_ATTR -void ZSTD_updateDUBT(ZSTD_matchState_t* ms, +void ZSTD_updateDUBT(ZSTD_MatchState_t* ms, const BYTE* ip, const BYTE* iend, U32 mls) { @@ -71,7 +71,7 @@ void ZSTD_updateDUBT(ZSTD_matchState_t* ms, * doesn't fail */ static ZSTD_ALLOW_POINTER_OVERFLOW_ATTR -void ZSTD_insertDUBT1(const ZSTD_matchState_t* ms, +void ZSTD_insertDUBT1(const ZSTD_MatchState_t* ms, U32 curr, const BYTE* inputEnd, U32 nbCompares, U32 btLow, const ZSTD_dictMode_e dictMode) @@ -162,7 +162,7 @@ void ZSTD_insertDUBT1(const ZSTD_matchState_t* ms, static ZSTD_ALLOW_POINTER_OVERFLOW_ATTR size_t ZSTD_DUBT_findBetterDictMatch ( - const ZSTD_matchState_t* ms, + const ZSTD_MatchState_t* ms, const BYTE* const ip, const BYTE* const iend, size_t* offsetPtr, size_t bestLength, @@ -170,7 +170,7 @@ size_t ZSTD_DUBT_findBetterDictMatch ( U32 const mls, const ZSTD_dictMode_e dictMode) { - const ZSTD_matchState_t * const dms = ms->dictMatchState; + const ZSTD_MatchState_t * const dms = ms->dictMatchState; const ZSTD_compressionParameters* const dmsCParams = &dms->cParams; const U32 * const dictHashTable = dms->hashTable; U32 const hashLog = dmsCParams->hashLog; @@ -240,7 +240,7 @@ size_t ZSTD_DUBT_findBetterDictMatch ( static ZSTD_ALLOW_POINTER_OVERFLOW_ATTR -size_t ZSTD_DUBT_findBestMatch(ZSTD_matchState_t* ms, +size_t ZSTD_DUBT_findBestMatch(ZSTD_MatchState_t* ms, const BYTE* const ip, const BYTE* const iend, size_t* offBasePtr, U32 const mls, @@ -392,7 +392,7 @@ size_t ZSTD_DUBT_findBestMatch(ZSTD_matchState_t* ms, /** ZSTD_BtFindBestMatch() : Tree updater, providing best match */ FORCE_INLINE_TEMPLATE ZSTD_ALLOW_POINTER_OVERFLOW_ATTR -size_t ZSTD_BtFindBestMatch( ZSTD_matchState_t* ms, +size_t ZSTD_BtFindBestMatch( ZSTD_MatchState_t* ms, const BYTE* const ip, const BYTE* const iLimit, size_t* offBasePtr, const U32 mls /* template */, @@ -408,7 +408,7 @@ size_t ZSTD_BtFindBestMatch( ZSTD_matchState_t* ms, * Dedicated dict search ***********************************/ -void ZSTD_dedicatedDictSearch_lazy_loadDictionary(ZSTD_matchState_t* ms, const BYTE* const ip) +void ZSTD_dedicatedDictSearch_lazy_loadDictionary(ZSTD_MatchState_t* ms, const BYTE* const ip) { const BYTE* const base = ms->window.base; U32 const target = (U32)(ip - base); @@ -527,7 +527,7 @@ void ZSTD_dedicatedDictSearch_lazy_loadDictionary(ZSTD_matchState_t* ms, const B */ FORCE_INLINE_TEMPLATE size_t ZSTD_dedicatedDictSearch_lazy_search(size_t* offsetPtr, size_t ml, U32 nbAttempts, - const ZSTD_matchState_t* const dms, + const ZSTD_MatchState_t* const dms, const BYTE* const ip, const BYTE* const iLimit, const BYTE* const prefixStart, const U32 curr, const U32 dictLimit, const size_t ddsIdx) { @@ -630,7 +630,7 @@ size_t ZSTD_dedicatedDictSearch_lazy_search(size_t* offsetPtr, size_t ml, U32 nb FORCE_INLINE_TEMPLATE ZSTD_ALLOW_POINTER_OVERFLOW_ATTR U32 ZSTD_insertAndFindFirstIndex_internal( - ZSTD_matchState_t* ms, + ZSTD_MatchState_t* ms, const ZSTD_compressionParameters* const cParams, const BYTE* ip, U32 const mls, U32 const lazySkipping) { @@ -656,7 +656,7 @@ U32 ZSTD_insertAndFindFirstIndex_internal( return 
hashTable[ZSTD_hashPtr(ip, hashLog, mls)]; } -U32 ZSTD_insertAndFindFirstIndex(ZSTD_matchState_t* ms, const BYTE* ip) { +U32 ZSTD_insertAndFindFirstIndex(ZSTD_MatchState_t* ms, const BYTE* ip) { const ZSTD_compressionParameters* const cParams = &ms->cParams; return ZSTD_insertAndFindFirstIndex_internal(ms, cParams, ip, ms->cParams.minMatch, /* lazySkipping*/ 0); } @@ -665,7 +665,7 @@ U32 ZSTD_insertAndFindFirstIndex(ZSTD_matchState_t* ms, const BYTE* ip) { FORCE_INLINE_TEMPLATE ZSTD_ALLOW_POINTER_OVERFLOW_ATTR size_t ZSTD_HcFindBestMatch( - ZSTD_matchState_t* ms, + ZSTD_MatchState_t* ms, const BYTE* const ip, const BYTE* const iLimit, size_t* offsetPtr, const U32 mls, const ZSTD_dictMode_e dictMode) @@ -689,7 +689,7 @@ size_t ZSTD_HcFindBestMatch( U32 nbAttempts = 1U << cParams->searchLog; size_t ml=4-1; - const ZSTD_matchState_t* const dms = ms->dictMatchState; + const ZSTD_MatchState_t* const dms = ms->dictMatchState; const U32 ddsHashLog = dictMode == ZSTD_dedicatedDictSearch ? dms->cParams.hashLog - ZSTD_LAZY_DDSS_BUCKET_LOG : 0; const size_t ddsIdx = dictMode == ZSTD_dedicatedDictSearch @@ -834,7 +834,7 @@ FORCE_INLINE_TEMPLATE void ZSTD_row_prefetch(U32 const* hashTable, BYTE const* t */ FORCE_INLINE_TEMPLATE ZSTD_ALLOW_POINTER_OVERFLOW_ATTR -void ZSTD_row_fillHashCache(ZSTD_matchState_t* ms, const BYTE* base, +void ZSTD_row_fillHashCache(ZSTD_MatchState_t* ms, const BYTE* base, U32 const rowLog, U32 const mls, U32 idx, const BYTE* const iLimit) { @@ -882,7 +882,7 @@ U32 ZSTD_row_nextCachedHash(U32* cache, U32 const* hashTable, */ FORCE_INLINE_TEMPLATE ZSTD_ALLOW_POINTER_OVERFLOW_ATTR -void ZSTD_row_update_internalImpl(ZSTD_matchState_t* ms, +void ZSTD_row_update_internalImpl(ZSTD_MatchState_t* ms, U32 updateStartIdx, U32 const updateEndIdx, U32 const mls, U32 const rowLog, U32 const rowMask, U32 const useCache) @@ -913,7 +913,7 @@ void ZSTD_row_update_internalImpl(ZSTD_matchState_t* ms, */ FORCE_INLINE_TEMPLATE ZSTD_ALLOW_POINTER_OVERFLOW_ATTR -void ZSTD_row_update_internal(ZSTD_matchState_t* ms, const BYTE* ip, +void ZSTD_row_update_internal(ZSTD_MatchState_t* ms, const BYTE* ip, U32 const mls, U32 const rowLog, U32 const rowMask, U32 const useCache) { @@ -946,7 +946,7 @@ void ZSTD_row_update_internal(ZSTD_matchState_t* ms, const BYTE* ip, * External wrapper for ZSTD_row_update_internal(). Used for filling the hashtable during dictionary * processing. */ -void ZSTD_row_update(ZSTD_matchState_t* const ms, const BYTE* ip) { +void ZSTD_row_update(ZSTD_MatchState_t* const ms, const BYTE* ip) { const U32 rowLog = BOUNDED(4, ms->cParams.searchLog, 6); const U32 rowMask = (1u << rowLog) - 1; const U32 mls = MIN(ms->cParams.minMatch, 6 /* mls caps out at 6 */); @@ -1139,7 +1139,7 @@ ZSTD_row_getMatchMask(const BYTE* const tagRow, const BYTE tag, const U32 headGr FORCE_INLINE_TEMPLATE ZSTD_ALLOW_POINTER_OVERFLOW_ATTR size_t ZSTD_RowFindBestMatch( - ZSTD_matchState_t* ms, + ZSTD_MatchState_t* ms, const BYTE* const ip, const BYTE* const iLimit, size_t* offsetPtr, const U32 mls, const ZSTD_dictMode_e dictMode, @@ -1171,7 +1171,7 @@ size_t ZSTD_RowFindBestMatch( U32 hash; /* DMS/DDS variables that may be referenced laster */ - const ZSTD_matchState_t* const dms = ms->dictMatchState; + const ZSTD_MatchState_t* const dms = ms->dictMatchState; /* Initialize the following variables to satisfy static analyzer */ size_t ddsIdx = 0; @@ -1340,7 +1340,7 @@ size_t ZSTD_RowFindBestMatch( * ZSTD_searchMax() dispatches to the correct implementation function. 
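
The ZSTD_GEN_DFAST_FN and ZSTD_GEN_FAST_FN macros above, and the GEN_ZSTD_BT/HC/ROW_SEARCH_FN macros just below, all follow the same pattern: a FORCE_INLINE_TEMPLATE generic function is parameterized by compile-time constants, and a macro stamps out one thin named wrapper per parameter combination so the compiler can fully specialize each instance. A self-contained sketch of that technique, with invented names (sum_generic, GEN_SUM_FN) rather than the zstd macros:

    #include <stdio.h>
    #include <stddef.h>

    /* Generic worker: `step` acts like a template parameter. Because every
     * wrapper below passes a literal constant, each specialization compiles
     * to a loop with its step folded in. */
    static inline size_t sum_generic(const int* data, size_t n, size_t step)
    {
        size_t total = 0;
        for (size_t i = 0; i < n; i += step) total += (size_t)data[i];
        return total;
    }

    /* Stamp out one named specialization per step value. */
    #define GEN_SUM_FN(step)                                      \
        static size_t sum_step_##step(const int* data, size_t n)  \
        {                                                         \
            return sum_generic(data, n, step);                    \
        }

    GEN_SUM_FN(1)
    GEN_SUM_FN(2)
    GEN_SUM_FN(4)

    int main(void)
    {
        int data[8] = { 1, 2, 3, 4, 5, 6, 7, 8 };
        printf("%zu %zu %zu\n",
               sum_step_1(data, 8),   /* 36 */
               sum_step_2(data, 8),   /* 1+3+5+7 = 16 */
               sum_step_4(data, 8));  /* 1+5 = 6 */
        return 0;
    }

The generated wrappers are what end up behind function pointers elsewhere in the library, so the rename has to reach both the macro bodies and every generated signature.
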
* * TODO: The start of the search function involves loading and calculating a - * bunch of constants from the ZSTD_matchState_t. These computations could be + * bunch of constants from the ZSTD_MatchState_t. These computations could be * done in an initialization function, and saved somewhere in the match state. * Then we could pass a pointer to the saved state instead of the match state, * and avoid duplicate computations. @@ -1364,7 +1364,7 @@ size_t ZSTD_RowFindBestMatch( #define GEN_ZSTD_BT_SEARCH_FN(dictMode, mls) \ ZSTD_SEARCH_FN_ATTRS size_t ZSTD_BT_SEARCH_FN(dictMode, mls)( \ - ZSTD_matchState_t* ms, \ + ZSTD_MatchState_t* ms, \ const BYTE* ip, const BYTE* const iLimit, \ size_t* offBasePtr) \ { \ @@ -1374,7 +1374,7 @@ size_t ZSTD_RowFindBestMatch( #define GEN_ZSTD_HC_SEARCH_FN(dictMode, mls) \ ZSTD_SEARCH_FN_ATTRS size_t ZSTD_HC_SEARCH_FN(dictMode, mls)( \ - ZSTD_matchState_t* ms, \ + ZSTD_MatchState_t* ms, \ const BYTE* ip, const BYTE* const iLimit, \ size_t* offsetPtr) \ { \ @@ -1384,7 +1384,7 @@ size_t ZSTD_RowFindBestMatch( #define GEN_ZSTD_ROW_SEARCH_FN(dictMode, mls, rowLog) \ ZSTD_SEARCH_FN_ATTRS size_t ZSTD_ROW_SEARCH_FN(dictMode, mls, rowLog)( \ - ZSTD_matchState_t* ms, \ + ZSTD_MatchState_t* ms, \ const BYTE* ip, const BYTE* const iLimit, \ size_t* offsetPtr) \ { \ @@ -1485,7 +1485,7 @@ typedef enum { search_hashChain=0, search_binaryTree=1, search_rowHash=2 } searc * If a match is found its offset is stored in @p offsetPtr. */ FORCE_INLINE_TEMPLATE size_t ZSTD_searchMax( - ZSTD_matchState_t* ms, + ZSTD_MatchState_t* ms, const BYTE* ip, const BYTE* iend, size_t* offsetPtr, @@ -1514,7 +1514,7 @@ FORCE_INLINE_TEMPLATE size_t ZSTD_searchMax( FORCE_INLINE_TEMPLATE ZSTD_ALLOW_POINTER_OVERFLOW_ATTR size_t ZSTD_compressBlock_lazy_generic( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], const void* src, size_t srcSize, const searchMethod_e searchMethod, const U32 depth, @@ -1537,7 +1537,7 @@ size_t ZSTD_compressBlock_lazy_generic( const int isDMS = dictMode == ZSTD_dictMatchState; const int isDDS = dictMode == ZSTD_dedicatedDictSearch; const int isDxS = isDMS || isDDS; - const ZSTD_matchState_t* const dms = ms->dictMatchState; + const ZSTD_MatchState_t* const dms = ms->dictMatchState; const U32 dictLowestIndex = isDxS ? dms->window.dictLimit : 0; const BYTE* const dictBase = isDxS ? dms->window.base : NULL; const BYTE* const dictLowest = isDxS ? 
dictBase + dictLowestIndex : NULL; @@ -1782,42 +1782,42 @@ _storeSequence: #ifndef ZSTD_EXCLUDE_GREEDY_BLOCK_COMPRESSOR size_t ZSTD_compressBlock_greedy( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize) { return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_hashChain, 0, ZSTD_noDict); } size_t ZSTD_compressBlock_greedy_dictMatchState( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize) { return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_hashChain, 0, ZSTD_dictMatchState); } size_t ZSTD_compressBlock_greedy_dedicatedDictSearch( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize) { return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_hashChain, 0, ZSTD_dedicatedDictSearch); } size_t ZSTD_compressBlock_greedy_row( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize) { return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_rowHash, 0, ZSTD_noDict); } size_t ZSTD_compressBlock_greedy_dictMatchState_row( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize) { return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_rowHash, 0, ZSTD_dictMatchState); } size_t ZSTD_compressBlock_greedy_dedicatedDictSearch_row( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize) { return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_rowHash, 0, ZSTD_dedicatedDictSearch); @@ -1826,42 +1826,42 @@ size_t ZSTD_compressBlock_greedy_dedicatedDictSearch_row( #ifndef ZSTD_EXCLUDE_LAZY_BLOCK_COMPRESSOR size_t ZSTD_compressBlock_lazy( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize) { return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_hashChain, 1, ZSTD_noDict); } size_t ZSTD_compressBlock_lazy_dictMatchState( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize) { return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_hashChain, 1, ZSTD_dictMatchState); } size_t ZSTD_compressBlock_lazy_dedicatedDictSearch( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize) { return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_hashChain, 1, ZSTD_dedicatedDictSearch); } size_t ZSTD_compressBlock_lazy_row( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize) { return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, 
srcSize, search_rowHash, 1, ZSTD_noDict); } size_t ZSTD_compressBlock_lazy_dictMatchState_row( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize) { return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_rowHash, 1, ZSTD_dictMatchState); } size_t ZSTD_compressBlock_lazy_dedicatedDictSearch_row( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize) { return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_rowHash, 1, ZSTD_dedicatedDictSearch); @@ -1870,42 +1870,42 @@ size_t ZSTD_compressBlock_lazy_dedicatedDictSearch_row( #ifndef ZSTD_EXCLUDE_LAZY2_BLOCK_COMPRESSOR size_t ZSTD_compressBlock_lazy2( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize) { return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_hashChain, 2, ZSTD_noDict); } size_t ZSTD_compressBlock_lazy2_dictMatchState( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize) { return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_hashChain, 2, ZSTD_dictMatchState); } size_t ZSTD_compressBlock_lazy2_dedicatedDictSearch( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize) { return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_hashChain, 2, ZSTD_dedicatedDictSearch); } size_t ZSTD_compressBlock_lazy2_row( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize) { return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_rowHash, 2, ZSTD_noDict); } size_t ZSTD_compressBlock_lazy2_dictMatchState_row( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize) { return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_rowHash, 2, ZSTD_dictMatchState); } size_t ZSTD_compressBlock_lazy2_dedicatedDictSearch_row( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize) { return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_rowHash, 2, ZSTD_dedicatedDictSearch); @@ -1914,14 +1914,14 @@ size_t ZSTD_compressBlock_lazy2_dedicatedDictSearch_row( #ifndef ZSTD_EXCLUDE_BTLAZY2_BLOCK_COMPRESSOR size_t ZSTD_compressBlock_btlazy2( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize) { return ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_binaryTree, 2, ZSTD_noDict); } size_t ZSTD_compressBlock_btlazy2_dictMatchState( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize) { return 
ZSTD_compressBlock_lazy_generic(ms, seqStore, rep, src, srcSize, search_binaryTree, 2, ZSTD_dictMatchState); @@ -1935,7 +1935,7 @@ size_t ZSTD_compressBlock_btlazy2_dictMatchState( FORCE_INLINE_TEMPLATE ZSTD_ALLOW_POINTER_OVERFLOW_ATTR size_t ZSTD_compressBlock_lazy_extDict_generic( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], const void* src, size_t srcSize, const searchMethod_e searchMethod, const U32 depth) @@ -2139,14 +2139,14 @@ _storeSequence: #ifndef ZSTD_EXCLUDE_GREEDY_BLOCK_COMPRESSOR size_t ZSTD_compressBlock_greedy_extDict( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize) { return ZSTD_compressBlock_lazy_extDict_generic(ms, seqStore, rep, src, srcSize, search_hashChain, 0); } size_t ZSTD_compressBlock_greedy_extDict_row( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize) { return ZSTD_compressBlock_lazy_extDict_generic(ms, seqStore, rep, src, srcSize, search_rowHash, 0); @@ -2155,7 +2155,7 @@ size_t ZSTD_compressBlock_greedy_extDict_row( #ifndef ZSTD_EXCLUDE_LAZY_BLOCK_COMPRESSOR size_t ZSTD_compressBlock_lazy_extDict( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize) { @@ -2163,7 +2163,7 @@ size_t ZSTD_compressBlock_lazy_extDict( } size_t ZSTD_compressBlock_lazy_extDict_row( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize) { @@ -2173,7 +2173,7 @@ size_t ZSTD_compressBlock_lazy_extDict_row( #ifndef ZSTD_EXCLUDE_LAZY2_BLOCK_COMPRESSOR size_t ZSTD_compressBlock_lazy2_extDict( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize) { @@ -2181,7 +2181,7 @@ size_t ZSTD_compressBlock_lazy2_extDict( } size_t ZSTD_compressBlock_lazy2_extDict_row( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize) { return ZSTD_compressBlock_lazy_extDict_generic(ms, seqStore, rep, src, srcSize, search_rowHash, 2); @@ -2190,7 +2190,7 @@ size_t ZSTD_compressBlock_lazy2_extDict_row( #ifndef ZSTD_EXCLUDE_BTLAZY2_BLOCK_COMPRESSOR size_t ZSTD_compressBlock_btlazy2_extDict( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize) { diff --git a/lib/compress/zstd_lazy.h b/lib/compress/zstd_lazy.h index 21258145e..6da51e4ec 100644 --- a/lib/compress/zstd_lazy.h +++ b/lib/compress/zstd_lazy.h @@ -31,38 +31,38 @@ extern "C" { || !defined(ZSTD_EXCLUDE_LAZY_BLOCK_COMPRESSOR) \ || !defined(ZSTD_EXCLUDE_LAZY2_BLOCK_COMPRESSOR) \ || !defined(ZSTD_EXCLUDE_BTLAZY2_BLOCK_COMPRESSOR) -U32 ZSTD_insertAndFindFirstIndex(ZSTD_matchState_t* ms, const BYTE* ip); -void ZSTD_row_update(ZSTD_matchState_t* const ms, const BYTE* ip); +U32 ZSTD_insertAndFindFirstIndex(ZSTD_MatchState_t* ms, const BYTE* ip); +void ZSTD_row_update(ZSTD_MatchState_t* const ms, const BYTE* ip); -void 
ZSTD_dedicatedDictSearch_lazy_loadDictionary(ZSTD_matchState_t* ms, const BYTE* const ip); +void ZSTD_dedicatedDictSearch_lazy_loadDictionary(ZSTD_MatchState_t* ms, const BYTE* const ip); void ZSTD_preserveUnsortedMark (U32* const table, U32 const size, U32 const reducerValue); /*! used in ZSTD_reduceIndex(). preemptively increase value of ZSTD_DUBT_UNSORTED_MARK */ #endif #ifndef ZSTD_EXCLUDE_GREEDY_BLOCK_COMPRESSOR size_t ZSTD_compressBlock_greedy( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize); size_t ZSTD_compressBlock_greedy_row( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize); size_t ZSTD_compressBlock_greedy_dictMatchState( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize); size_t ZSTD_compressBlock_greedy_dictMatchState_row( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize); size_t ZSTD_compressBlock_greedy_dedicatedDictSearch( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize); size_t ZSTD_compressBlock_greedy_dedicatedDictSearch_row( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize); size_t ZSTD_compressBlock_greedy_extDict( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize); size_t ZSTD_compressBlock_greedy_extDict_row( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize); #define ZSTD_COMPRESSBLOCK_GREEDY ZSTD_compressBlock_greedy @@ -86,28 +86,28 @@ size_t ZSTD_compressBlock_greedy_extDict_row( #ifndef ZSTD_EXCLUDE_LAZY_BLOCK_COMPRESSOR size_t ZSTD_compressBlock_lazy( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize); size_t ZSTD_compressBlock_lazy_row( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize); size_t ZSTD_compressBlock_lazy_dictMatchState( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize); size_t ZSTD_compressBlock_lazy_dictMatchState_row( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize); size_t ZSTD_compressBlock_lazy_dedicatedDictSearch( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize); size_t ZSTD_compressBlock_lazy_dedicatedDictSearch_row( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 
rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize); size_t ZSTD_compressBlock_lazy_extDict( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize); size_t ZSTD_compressBlock_lazy_extDict_row( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize); #define ZSTD_COMPRESSBLOCK_LAZY ZSTD_compressBlock_lazy @@ -131,28 +131,28 @@ size_t ZSTD_compressBlock_lazy_extDict_row( #ifndef ZSTD_EXCLUDE_LAZY2_BLOCK_COMPRESSOR size_t ZSTD_compressBlock_lazy2( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize); size_t ZSTD_compressBlock_lazy2_row( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize); size_t ZSTD_compressBlock_lazy2_dictMatchState( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize); size_t ZSTD_compressBlock_lazy2_dictMatchState_row( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize); size_t ZSTD_compressBlock_lazy2_dedicatedDictSearch( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize); size_t ZSTD_compressBlock_lazy2_dedicatedDictSearch_row( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize); size_t ZSTD_compressBlock_lazy2_extDict( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize); size_t ZSTD_compressBlock_lazy2_extDict_row( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize); #define ZSTD_COMPRESSBLOCK_LAZY2 ZSTD_compressBlock_lazy2 @@ -176,13 +176,13 @@ size_t ZSTD_compressBlock_lazy2_extDict_row( #ifndef ZSTD_EXCLUDE_BTLAZY2_BLOCK_COMPRESSOR size_t ZSTD_compressBlock_btlazy2( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize); size_t ZSTD_compressBlock_btlazy2_dictMatchState( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize); size_t ZSTD_compressBlock_btlazy2_extDict( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize); #define ZSTD_COMPRESSBLOCK_BTLAZY2 ZSTD_compressBlock_btlazy2 diff --git a/lib/compress/zstd_ldm.c b/lib/compress/zstd_ldm.c index e636a42c0..8812b72a2 100644 --- a/lib/compress/zstd_ldm.c +++ b/lib/compress/zstd_ldm.c @@ -234,7 +234,7 @@ static size_t 
ZSTD_ldm_countBackwardsMatch_2segments( * * The tables for the other strategies are filled within their * block compressors. */ -static size_t ZSTD_ldm_fillFastTables(ZSTD_matchState_t* ms, +static size_t ZSTD_ldm_fillFastTables(ZSTD_MatchState_t* ms, void const* end) { const BYTE* const iend = (const BYTE*)end; @@ -314,7 +314,7 @@ void ZSTD_ldm_fillHashTable( * Sets cctx->nextToUpdate to a position corresponding closer to anchor * if it is far way * (after a long match, only update tables a limited amount). */ -static void ZSTD_ldm_limitTableUpdate(ZSTD_matchState_t* ms, const BYTE* anchor) +static void ZSTD_ldm_limitTableUpdate(ZSTD_MatchState_t* ms, const BYTE* anchor) { U32 const curr = (U32)(anchor - ms->window.base); if (curr > ms->nextToUpdate + 1024) { @@ -665,7 +665,7 @@ void ZSTD_ldm_skipRawSeqStoreBytes(rawSeqStore_t* rawSeqStore, size_t nbBytes) { } size_t ZSTD_ldm_blockCompress(rawSeqStore_t* rawSeqStore, - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], ZSTD_paramSwitch_e useRowMatchFinder, void const* src, size_t srcSize) { diff --git a/lib/compress/zstd_ldm.h b/lib/compress/zstd_ldm.h index 624caf937..9bdf90cda 100644 --- a/lib/compress/zstd_ldm.h +++ b/lib/compress/zstd_ldm.h @@ -65,7 +65,7 @@ size_t ZSTD_ldm_generateSequences( * NOTE: This function does not return any errors. */ size_t ZSTD_ldm_blockCompress(rawSeqStore_t* rawSeqStore, - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], ZSTD_paramSwitch_e useRowMatchFinder, void const* src, size_t srcSize); diff --git a/lib/compress/zstd_opt.c b/lib/compress/zstd_opt.c index 1045cdd2c..1aee5017b 100644 --- a/lib/compress/zstd_opt.c +++ b/lib/compress/zstd_opt.c @@ -408,7 +408,7 @@ MEM_STATIC U32 ZSTD_readMINMATCH(const void* memPtr, U32 length) Assumption : always within prefix (i.e. 
not within extDict) */ static ZSTD_ALLOW_POINTER_OVERFLOW_ATTR -U32 ZSTD_insertAndFindFirstIndexHash3 (const ZSTD_matchState_t* ms, +U32 ZSTD_insertAndFindFirstIndexHash3 (const ZSTD_MatchState_t* ms, U32* nextToUpdate3, const BYTE* const ip) { @@ -440,7 +440,7 @@ U32 ZSTD_insertAndFindFirstIndexHash3 (const ZSTD_matchState_t* ms, static ZSTD_ALLOW_POINTER_OVERFLOW_ATTR U32 ZSTD_insertBt1( - const ZSTD_matchState_t* ms, + const ZSTD_MatchState_t* ms, const BYTE* const ip, const BYTE* const iend, U32 const target, U32 const mls, const int extDict) @@ -560,7 +560,7 @@ U32 ZSTD_insertBt1( FORCE_INLINE_TEMPLATE ZSTD_ALLOW_POINTER_OVERFLOW_ATTR void ZSTD_updateTree_internal( - ZSTD_matchState_t* ms, + ZSTD_MatchState_t* ms, const BYTE* const ip, const BYTE* const iend, const U32 mls, const ZSTD_dictMode_e dictMode) { @@ -580,7 +580,7 @@ void ZSTD_updateTree_internal( ms->nextToUpdate = target; } -void ZSTD_updateTree(ZSTD_matchState_t* ms, const BYTE* ip, const BYTE* iend) { +void ZSTD_updateTree(ZSTD_MatchState_t* ms, const BYTE* ip, const BYTE* iend) { ZSTD_updateTree_internal(ms, ip, iend, ms->cParams.minMatch, ZSTD_noDict); } @@ -589,7 +589,7 @@ ZSTD_ALLOW_POINTER_OVERFLOW_ATTR U32 ZSTD_insertBtAndGetAllMatches ( ZSTD_match_t* matches, /* store result (found matches) in this table (presumed large enough) */ - ZSTD_matchState_t* ms, + ZSTD_MatchState_t* ms, U32* nextToUpdate3, const BYTE* const ip, const BYTE* const iLimit, const ZSTD_dictMode_e dictMode, @@ -625,7 +625,7 @@ ZSTD_insertBtAndGetAllMatches ( U32 mnum = 0; U32 nbCompares = 1U << cParams->searchLog; - const ZSTD_matchState_t* dms = dictMode == ZSTD_dictMatchState ? ms->dictMatchState : NULL; + const ZSTD_MatchState_t* dms = dictMode == ZSTD_dictMatchState ? ms->dictMatchState : NULL; const ZSTD_compressionParameters* const dmsCParams = dictMode == ZSTD_dictMatchState ? &dms->cParams : NULL; const BYTE* const dmsBase = dictMode == ZSTD_dictMatchState ? 
dms->window.base : NULL; @@ -819,7 +819,7 @@ ZSTD_insertBtAndGetAllMatches ( typedef U32 (*ZSTD_getAllMatchesFn)( ZSTD_match_t*, - ZSTD_matchState_t*, + ZSTD_MatchState_t*, U32*, const BYTE*, const BYTE*, @@ -831,7 +831,7 @@ FORCE_INLINE_TEMPLATE ZSTD_ALLOW_POINTER_OVERFLOW_ATTR U32 ZSTD_btGetAllMatches_internal( ZSTD_match_t* matches, - ZSTD_matchState_t* ms, + ZSTD_MatchState_t* ms, U32* nextToUpdate3, const BYTE* ip, const BYTE* const iHighLimit, @@ -854,7 +854,7 @@ U32 ZSTD_btGetAllMatches_internal( #define GEN_ZSTD_BT_GET_ALL_MATCHES_(dictMode, mls) \ static U32 ZSTD_BT_GET_ALL_MATCHES_FN(dictMode, mls)( \ ZSTD_match_t* matches, \ - ZSTD_matchState_t* ms, \ + ZSTD_MatchState_t* ms, \ U32* nextToUpdate3, \ const BYTE* ip, \ const BYTE* const iHighLimit, \ @@ -886,7 +886,7 @@ GEN_ZSTD_BT_GET_ALL_MATCHES(dictMatchState) } static ZSTD_getAllMatchesFn -ZSTD_selectBtGetAllMatches(ZSTD_matchState_t const* ms, ZSTD_dictMode_e const dictMode) +ZSTD_selectBtGetAllMatches(ZSTD_MatchState_t const* ms, ZSTD_dictMode_e const dictMode) { ZSTD_getAllMatchesFn const getAllMatchesFns[3][4] = { ZSTD_BT_GET_ALL_MATCHES_ARRAY(noDict), @@ -1072,7 +1072,7 @@ listStats(const U32* table, int lastEltID) FORCE_INLINE_TEMPLATE ZSTD_ALLOW_POINTER_OVERFLOW_ATTR size_t -ZSTD_compressBlock_opt_generic(ZSTD_matchState_t* ms, +ZSTD_compressBlock_opt_generic(ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], const void* src, size_t srcSize, @@ -1440,7 +1440,7 @@ _shortestPath: /* cur, last_pos, best_mlen, best_off have to be set */ #ifndef ZSTD_EXCLUDE_BTOPT_BLOCK_COMPRESSOR static size_t ZSTD_compressBlock_opt0( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], const void* src, size_t srcSize, const ZSTD_dictMode_e dictMode) { return ZSTD_compressBlock_opt_generic(ms, seqStore, rep, src, srcSize, 0 /* optLevel */, dictMode); @@ -1449,7 +1449,7 @@ static size_t ZSTD_compressBlock_opt0( #ifndef ZSTD_EXCLUDE_BTULTRA_BLOCK_COMPRESSOR static size_t ZSTD_compressBlock_opt2( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], const void* src, size_t srcSize, const ZSTD_dictMode_e dictMode) { return ZSTD_compressBlock_opt_generic(ms, seqStore, rep, src, srcSize, 2 /* optLevel */, dictMode); @@ -1458,7 +1458,7 @@ static size_t ZSTD_compressBlock_opt2( #ifndef ZSTD_EXCLUDE_BTOPT_BLOCK_COMPRESSOR size_t ZSTD_compressBlock_btopt( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], const void* src, size_t srcSize) { DEBUGLOG(5, "ZSTD_compressBlock_btopt"); @@ -1477,7 +1477,7 @@ size_t ZSTD_compressBlock_btopt( */ static ZSTD_ALLOW_POINTER_OVERFLOW_ATTR -void ZSTD_initStats_ultra(ZSTD_matchState_t* ms, +void ZSTD_initStats_ultra(ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], const void* src, size_t srcSize) @@ -1503,7 +1503,7 @@ void ZSTD_initStats_ultra(ZSTD_matchState_t* ms, } size_t ZSTD_compressBlock_btultra( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], const void* src, size_t srcSize) { DEBUGLOG(5, "ZSTD_compressBlock_btultra (srcSize=%zu)", srcSize); @@ -1511,7 +1511,7 @@ size_t ZSTD_compressBlock_btultra( } size_t ZSTD_compressBlock_btultra2( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* 
ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], const void* src, size_t srcSize) { U32 const curr = (U32)((const BYTE*)src - ms->window.base); @@ -1541,14 +1541,14 @@ size_t ZSTD_compressBlock_btultra2( #ifndef ZSTD_EXCLUDE_BTOPT_BLOCK_COMPRESSOR size_t ZSTD_compressBlock_btopt_dictMatchState( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], const void* src, size_t srcSize) { return ZSTD_compressBlock_opt0(ms, seqStore, rep, src, srcSize, ZSTD_dictMatchState); } size_t ZSTD_compressBlock_btopt_extDict( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], const void* src, size_t srcSize) { return ZSTD_compressBlock_opt0(ms, seqStore, rep, src, srcSize, ZSTD_extDict); @@ -1557,14 +1557,14 @@ size_t ZSTD_compressBlock_btopt_extDict( #ifndef ZSTD_EXCLUDE_BTULTRA_BLOCK_COMPRESSOR size_t ZSTD_compressBlock_btultra_dictMatchState( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], const void* src, size_t srcSize) { return ZSTD_compressBlock_opt2(ms, seqStore, rep, src, srcSize, ZSTD_dictMatchState); } size_t ZSTD_compressBlock_btultra_extDict( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], const void* src, size_t srcSize) { return ZSTD_compressBlock_opt2(ms, seqStore, rep, src, srcSize, ZSTD_extDict); diff --git a/lib/compress/zstd_opt.h b/lib/compress/zstd_opt.h index 9e1cff91a..247c9ee65 100644 --- a/lib/compress/zstd_opt.h +++ b/lib/compress/zstd_opt.h @@ -21,18 +21,18 @@ extern "C" { || !defined(ZSTD_EXCLUDE_BTOPT_BLOCK_COMPRESSOR) \ || !defined(ZSTD_EXCLUDE_BTULTRA_BLOCK_COMPRESSOR) /* used in ZSTD_loadDictionaryContent() */ -void ZSTD_updateTree(ZSTD_matchState_t* ms, const BYTE* ip, const BYTE* iend); +void ZSTD_updateTree(ZSTD_MatchState_t* ms, const BYTE* ip, const BYTE* iend); #endif #ifndef ZSTD_EXCLUDE_BTOPT_BLOCK_COMPRESSOR size_t ZSTD_compressBlock_btopt( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize); size_t ZSTD_compressBlock_btopt_dictMatchState( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize); size_t ZSTD_compressBlock_btopt_extDict( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize); #define ZSTD_COMPRESSBLOCK_BTOPT ZSTD_compressBlock_btopt @@ -46,20 +46,20 @@ size_t ZSTD_compressBlock_btopt_extDict( #ifndef ZSTD_EXCLUDE_BTULTRA_BLOCK_COMPRESSOR size_t ZSTD_compressBlock_btultra( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize); size_t ZSTD_compressBlock_btultra_dictMatchState( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize); size_t ZSTD_compressBlock_btultra_extDict( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void 
const* src, size_t srcSize); /* note : no btultra2 variant for extDict nor dictMatchState, * because btultra2 is not meant to work with dictionaries * and is only specific for the first block (no prefix) */ size_t ZSTD_compressBlock_btultra2( - ZSTD_matchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], + ZSTD_MatchState_t* ms, SeqStore_t* seqStore, U32 rep[ZSTD_REP_NUM], void const* src, size_t srcSize); #define ZSTD_COMPRESSBLOCK_BTULTRA ZSTD_compressBlock_btultra
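
Every ZSTD_compressBlock_* entry point renamed in this diff shares the ZSTD_blockCompressor signature from zstd_compress_internal.h, and ZSTD_selectBlockCompressor picks one by strategy, row-matchfinder mode, and dict mode. A freestanding sketch of that selection pattern, with stand-in types (MatchState, SeqStore), stub compressors, and a reduced two-by-two table instead of zstd's real one:

    #include <stdio.h>
    #include <stddef.h>

    /* Stand-ins for the internal types; the real ones live in zstd_compress_internal.h. */
    typedef struct { int unused; } MatchState;
    typedef struct { int unused; } SeqStore;

    typedef enum { STRAT_FAST = 0, STRAT_LAZY = 1 } Strategy;
    typedef enum { DICT_NONE = 0, DICT_EXT  = 1 } DictMode;

    /* Same pointer shape as the ZSTD_blockCompressor typedef shown in the diff:
     * match state + sequence store + repcodes + source block, returning a size_t. */
    typedef size_t (*BlockCompressor)(MatchState* ms, SeqStore* seqStore, unsigned rep[3],
                                      const void* src, size_t srcSize);

    /* Stub bodies: each just returns srcSize so the example stays tiny. */
    static size_t compress_fast(MatchState* ms, SeqStore* ss, unsigned rep[3],
                                const void* src, size_t srcSize)
    { (void)ms; (void)ss; (void)rep; (void)src; return srcSize; }

    static size_t compress_lazy(MatchState* ms, SeqStore* ss, unsigned rep[3],
                                const void* src, size_t srcSize)
    { (void)ms; (void)ss; (void)rep; (void)src; return srcSize; }

    static size_t compress_fast_extDict(MatchState* ms, SeqStore* ss, unsigned rep[3],
                                        const void* src, size_t srcSize)
    { (void)ms; (void)ss; (void)rep; (void)src; return srcSize; }

    static size_t compress_lazy_extDict(MatchState* ms, SeqStore* ss, unsigned rep[3],
                                        const void* src, size_t srcSize)
    { (void)ms; (void)ss; (void)rep; (void)src; return srcSize; }

    /* Dispatch table indexed by [dictMode][strategy], mirroring the role of
     * ZSTD_selectBlockCompressor: return one concrete compressor. */
    static BlockCompressor selectBlockCompressor(Strategy strat, DictMode dictMode)
    {
        static const BlockCompressor table[2][2] = {
            { compress_fast,         compress_lazy         },  /* DICT_NONE */
            { compress_fast_extDict, compress_lazy_extDict },  /* DICT_EXT  */
        };
        return table[dictMode][strat];
    }

    int main(void)
    {
        MatchState ms = { 0 };
        SeqStore   ss = { 0 };
        unsigned   rep[3] = { 1, 4, 8 };
        const char block[] = "example block";
        BlockCompressor bc = selectBlockCompressor(STRAT_LAZY, DICT_EXT);
        printf("stub compressor returned %zu\n", bc(&ms, &ss, rep, block, sizeof block));
        return 0;
    }

In the real library the table is larger (per dictMode, strategy, and row-matchfinder mode), but the codemod only changes the spelling of the state type in all of these signatures; the dispatch logic itself is untouched.
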