From: senhuang42 Date: Thu, 1 Oct 2020 15:47:35 +0000 (-0400) Subject: Make function descriptions more accurate X-Git-Tag: v1.4.7~57^2~17 X-Git-Url: http://git.ipfire.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=10647924f1c0e5daa35a43c8a1af848d09185b09;p=thirdparty%2Fzstd.git Make function descriptions more accurate --- diff --git a/lib/compress/zstd_compress.c b/lib/compress/zstd_compress.c index f298cd895..3a5ad0255 100644 --- a/lib/compress/zstd_compress.c +++ b/lib/compress/zstd_compress.c @@ -2336,7 +2336,7 @@ static size_t ZSTD_buildSeqStore(ZSTD_CCtx* zc, const void* src, size_t srcSize) if (curr > ms->nextToUpdate + 384) ms->nextToUpdate = curr - MIN(192, (U32)(curr - ms->nextToUpdate - 384)); } - + /* select and store sequences */ { ZSTD_dictMode_e const dictMode = ZSTD_matchState_dictMode(ms); size_t lastLLSize; diff --git a/lib/compress/zstd_compress_internal.h b/lib/compress/zstd_compress_internal.h index 954af31f9..179e4f7bb 100644 --- a/lib/compress/zstd_compress_internal.h +++ b/lib/compress/zstd_compress_internal.h @@ -82,7 +82,7 @@ typedef struct { } ZSTD_entropyCTables_t; typedef struct { - U32 off; /* Offset code for the match */ + U32 off; /* Offset code (offset + ZSTD_REP_MOVE) for the match */ U32 len; /* Raw length of match */ } ZSTD_match_t; diff --git a/lib/compress/zstd_ldm.c b/lib/compress/zstd_ldm.c index 60a6e5878..341877858 100644 --- a/lib/compress/zstd_ldm.c +++ b/lib/compress/zstd_ldm.c @@ -579,8 +579,7 @@ size_t ZSTD_ldm_blockCompress(rawSeqStore_t* rawSeqStore, size_t lastLLSize; ms->ldmSeqStore = *rawSeqStore; lastLLSize = blockCompressor(ms, seqStore, rep, src, srcSize); - /* ldm seqstore will have changed during blockCompressor() call, make sure we copy those changes */ - *rawSeqStore = ms->ldmSeqStore; + *rawSeqStore = ms->ldmSeqStore; /* Persist changes to ldmSeqStore during blockCompressor() */ return lastLLSize; } diff --git a/lib/compress/zstd_opt.c b/lib/compress/zstd_opt.c index 7dfd1af64..7efae9b87 100644 --- 
a/lib/compress/zstd_opt.c +++ b/lib/compress/zstd_opt.c @@ -768,14 +768,13 @@ FORCE_INLINE_TEMPLATE U32 ZSTD_BtGetAllMatches ( * LDM helper functions *********************************/ -/* Moves forward in rawSeqStore by nbBytes, which will update the fields - * 'pos' and 'posInSequence' accordingly. +/* ldm_moveForwardBytesInSeqStore(): + * Moves forward in rawSeqStore by nbBytes, which will update the fields 'pos' and 'posInSequence'. */ static void ldm_moveForwardBytesInSeqStore(rawSeqStore_t* ldmSeqStore, size_t nbBytes) { while (nbBytes && ldmSeqStore->pos < ldmSeqStore->size) { rawSeq currSeq; currSeq = ldmSeqStore->seq[ldmSeqStore->pos]; - if (nbBytes <= currSeq.litLength) { ldmSeqStore->posInSequence += nbBytes; return; @@ -783,7 +782,6 @@ static void ldm_moveForwardBytesInSeqStore(rawSeqStore_t* ldmSeqStore, size_t nb ldmSeqStore->posInSequence += currSeq.litLength; nbBytes -= currSeq.litLength; } - if (nbBytes < currSeq.matchLength) { ldmSeqStore->posInSequence += nbBytes; return; @@ -797,13 +795,14 @@ static void ldm_moveForwardBytesInSeqStore(rawSeqStore_t* ldmSeqStore, size_t nb } } -/* Calculates the beginning and end of a match, and updates 'pos' and 'posInSequence' - * of the ldmSeqStore. +/* ldm_getNextMatchAndUpdateSeqStore(): + * Calculates the beginning and end of the next match in the current block. + * Updates 'pos' and 'posInSequence' of the ldmSeqStore. 
*/ -static void ldm_calculateNextMatch(rawSeqStore_t* ldmSeqStore, - U32* matchStartPosInBlock, U32* matchEndPosInBlock, - U32* matchOffset, U32 currPosInBlock, - U32 blockBytesRemaining) { +static void ldm_getNextMatchAndUpdateSeqStore(rawSeqStore_t* ldmSeqStore, + U32* matchStartPosInBlock, U32* matchEndPosInBlock, + U32* matchOffset, U32 currPosInBlock, + U32 blockBytesRemaining) { rawSeq currSeq; U32 currBlockEndPos; U32 literalsBytesRemaining; @@ -827,7 +826,7 @@ static void ldm_calculateNextMatch(rawSeqStore_t* ldmSeqStore, currSeq.matchLength - (ldmSeqStore->posInSequence - currSeq.litLength) : currSeq.matchLength; - /* If there are more literal bytes than bytes remaining in block, no ldm */ + /* If there are more literal bytes than bytes remaining in block, no ldm is possible */ if (literalsBytesRemaining >= blockBytesRemaining) { *matchStartPosInBlock = UINT_MAX; *matchEndPosInBlock = UINT_MAX; @@ -852,7 +851,10 @@ static void ldm_calculateNextMatch(rawSeqStore_t* ldmSeqStore, } } -/* Adds an LDM if it's long enough */ +/* ldm_maybeAddLdm(): + * Adds a match if it's long enough, based on its 'matchStartPosInBlock' + * and 'matchEndPosInBlock', into 'matches'. Maintains the correct ordering of 'matches'. + */ static void ldm_maybeAddLdm(ZSTD_match_t* matches, U32* nbMatches, U32 matchStartPosInBlock, U32 matchEndPosInBlock, U32 matchOffset, U32 currPosInBlock) { @@ -865,7 +867,7 @@ static void ldm_maybeAddLdm(ZSTD_match_t* matches, U32* nbMatches, if (currPosInBlock < matchStartPosInBlock || currPosInBlock >= matchEndPosInBlock || posDiff > 0 || /* As a next evolution we can enable adding LDMs in the middle of a match */ - candidateMatchLength < ZSTD_LDM_MINMATCH_MIN) + candidateMatchLength < MINMATCH) return; if (*nbMatches == 0) { @@ -879,17 +881,18 @@ static void ldm_maybeAddLdm(ZSTD_match_t* matches, U32* nbMatches, * the ldm match down as necessary. 
*/ if (candidateMatchLength == matches[*nbMatches-1].len) { + U32 candidateMatchIdx; if (candidateOffCode == matches[*nbMatches-1].off) { /* No need to insert the match if it's the exact same */ return; } - U32 candidateMatchIdx = *nbMatches; + candidateMatchIdx = *nbMatches; matches[*nbMatches].len = candidateMatchLength; matches[*nbMatches].off = candidateOffCode; if (candidateOffCode != matches[*nbMatches-1].off) { while (candidateMatchIdx > 0 && - matches[candidateMatchIdx].off > matches[candidateMatchIdx - 1].off && - matches[candidateMatchIdx].len == matches[candidateMatchIdx - 1].len) { + matches[candidateMatchIdx].off > matches[candidateMatchIdx - 1].off && + matches[candidateMatchIdx].len == matches[candidateMatchIdx - 1].len) { ZSTD_match_t tmp = matches[candidateMatchIdx - 1]; matches[candidateMatchIdx - 1] = matches[candidateMatchIdx]; matches[candidateMatchIdx] = tmp; @@ -918,9 +921,9 @@ static void ldm_handleLdm(rawSeqStore_t* ldmSeqStore, ZSTD_match_t* matches, U32 U32 posOvershoot = currPosInBlock - *matchEndPosInBlock; ldm_moveForwardBytesInSeqStore(ldmSeqStore, posOvershoot); } - ldm_calculateNextMatch(ldmSeqStore, matchStartPosInBlock, - matchEndPosInBlock, matchOffset, - currPosInBlock, remainingBytes); + ldm_getNextMatchAndUpdateSeqStore(ldmSeqStore, matchStartPosInBlock, + matchEndPosInBlock, matchOffset, + currPosInBlock, remainingBytes); } ldm_maybeAddLdm(matches, nbMatches, *matchStartPosInBlock, *matchEndPosInBlock, *matchOffset, currPosInBlock); } @@ -985,9 +988,9 @@ ZSTD_compressBlock_opt_generic(ZSTD_matchState_t* ms, /* Get first match from ldm seq store if long mode is enabled */ if (ms->ldmSeqStore.size > 0 && ms->ldmSeqStore.pos < ms->ldmSeqStore.size) { - ldm_calculateNextMatch(&ms->ldmSeqStore, &ldmStartPosInBlock, - &ldmEndPosInBlock, &ldmOffset, - (U32)(ip-istart), (U32)(iend-ip)); + ldm_getNextMatchAndUpdateSeqStore(&ms->ldmSeqStore, &ldmStartPosInBlock, + &ldmEndPosInBlock, &ldmOffset, + (U32)(ip-istart), (U32)(iend-ip)); } /* 
init */ @@ -1249,7 +1252,7 @@ _shortestPath: /* cur, last_pos, best_mlen, best_off have to be set */ if (ldmEndPosInBlock < srcSize) { /* This can occur if after adding the final match in an ldm seq store within this block, - ip goes to the end of the block without activating a check for ldm_calculateNextMatch */ + ip reaches the end of the block without calling ldm_getNextMatchAndUpdateSeqStore() */ ldm_moveForwardBytesInSeqStore(&ms->ldmSeqStore, srcSize - ldmEndPosInBlock); } /* Return the last literals size */