{
ZSTD_window_clear(&ms->window);
- ms->nextToUpdate = ms->window.dictLimit + 1;
- ms->nextToUpdate3 = ms->window.dictLimit + 1;
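+ /* descriptive note (editor) : match finders resume indexing from the very start of the prefix (nextToUpdate == dictLimit) */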
+ ms->nextToUpdate = ms->window.dictLimit;
+ ms->nextToUpdate3 = ms->window.dictLimit;
ms->loadedDictEnd = 0;
ms->opt.litLengthSum = 0; /* force reset of btopt stats */
ms->dictMatchState = NULL;
sequences, nbSeq, longOffsets);
}
+/* ZSTD_compressSequences_internal():
+ * actually compresses both literals and sequences,
+ * writing the entropy-coded result into dst */
MEM_STATIC size_t
ZSTD_compressSequences_internal(seqStore_t* seqStorePtr,
- ZSTD_entropyCTables_t const* prevEntropy,
- ZSTD_entropyCTables_t* nextEntropy,
- ZSTD_CCtx_params const* cctxParams,
- void* dst, size_t dstCapacity,
- void* workspace, size_t wkspSize,
- const int bmi2)
+ const ZSTD_entropyCTables_t* prevEntropy,
+ ZSTD_entropyCTables_t* nextEntropy,
+ const ZSTD_CCtx_params* cctxParams,
+ void* dst, size_t dstCapacity,
+ void* workspace, size_t wkspSize,
+ const int bmi2)
{
const int longOffsets = cctxParams->cParams.windowLog > STREAM_ACCUMULATOR_MIN;
ZSTD_strategy const strategy = cctxParams->cParams.strategy;
BYTE* lastNCount = NULL;
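+ /* the same workspace is reused to build the FSE tables, so it must be large enough for the largest of them (presumed rationale for the static assert below) */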
ZSTD_STATIC_ASSERT(HUF_WORKSPACE_SIZE >= (1<<MAX(MLFSELog,LLFSELog)));
+ DEBUGLOG(5, "ZSTD_compressSequences_internal");
/* Compress literals */
{ const BYTE* const literals = seqStorePtr->litStart;
}
}
+ DEBUGLOG(5, "compressed block size : %u", (unsigned)(op - ostart));
return op - ostart;
}
{
ZSTD_matchState_t* const ms = &zc->blockState.matchState;
size_t cSize;
- DEBUGLOG(5, "ZSTD_compressBlock_internal (dstCapacity=%zu, dictLimit=%u, nextToUpdate=%u)",
- dstCapacity, ms->window.dictLimit, ms->nextToUpdate);
+ DEBUGLOG(5, "ZSTD_compressBlock_internal (dstCapacity=%u, dictLimit=%u, nextToUpdate=%u)",
+ (U32)dstCapacity, ms->window.dictLimit, ms->nextToUpdate);
assert(srcSize <= ZSTD_BLOCKSIZE_MAX);
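+ /* a single block never exceeds ZSTD_BLOCKSIZE_MAX (128 KB) */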
/* Assert that we have correctly flushed the ctx params into the ms's copy */
ms->opt.symbolCosts = &zc->blockState.prevCBlock->entropy; /* required for optimal parser to read stats from dictionary */
/* a gap between an attached dict and the current window is not safe,
- * they must remain adjacent, and when that stops being the case, the dict
- * must be unset */
+ * they must remain adjacent,
+ * and when that stops being the case, the dict must be unset */
assert(ms->dictMatchState == NULL || ms->loadedDictEnd == ms->window.dictLimit);
/* limited update after a very long match */
* dictMatchState mode, lowLimit and dictLimit are the same, and the dictionary
* is below them. forceWindow and dictMatchState are therefore incompatible.
*/
-MEM_STATIC void ZSTD_window_enforceMaxDist(ZSTD_window_t* window,
- void const* srcEnd, U32 maxDist,
- U32* loadedDictEndPtr,
- const ZSTD_matchState_t** dictMatchStatePtr)
+MEM_STATIC void
+ZSTD_window_enforceMaxDist(ZSTD_window_t* window,
+ void const* srcEnd,
+ U32 maxDist,
+ U32* loadedDictEndPtr,
+ const ZSTD_matchState_t** dictMatchStatePtr)
{
- U32 const current = (U32)((BYTE const*)srcEnd - window->base);
- U32 loadedDictEnd = loadedDictEndPtr != NULL ? *loadedDictEndPtr : 0;
- DEBUGLOG(5, "ZSTD_window_enforceMaxDist: current=%u, maxDist=%u", current, maxDist);
- if (current > maxDist + loadedDictEnd) {
- U32 const newLowLimit = current - maxDist;
+ U32 const blockEndIdx = (U32)((BYTE const*)srcEnd - window->base);
+ U32 loadedDictEnd = (loadedDictEndPtr != NULL) ? *loadedDictEndPtr : 0;
+ DEBUGLOG(5, "ZSTD_window_enforceMaxDist: blockEndIdx=%u, maxDist=%u",
+ blockEndIdx, maxDist);
+ if (blockEndIdx > maxDist + loadedDictEnd) {
+ U32 const newLowLimit = blockEndIdx - maxDist;
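+ /* no match may reference data more than maxDist bytes behind the end of the block : raise lowLimit accordingly */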
if (window->lowLimit < newLowLimit) window->lowLimit = newLowLimit;
if (window->dictLimit < window->lowLimit) {
DEBUGLOG(5, "Update dictLimit to match lowLimit, from %u to %u",
*/
static void
ZSTD_rescaleFreqs(optState_t* const optPtr,
- const BYTE* const src, size_t const srcSize,
- int optLevel)
+ const BYTE* const src, size_t const srcSize,
+ int const optLevel)
{
+ DEBUGLOG(5, "ZSTD_rescaleFreqs (srcSize=%u)", (unsigned)srcSize);
optPtr->priceType = zop_dynamic;
if (optPtr->litLengthSum == 0) { /* first block : init */
- if (srcSize <= ZSTD_PREDEF_THRESHOLD) /* heuristic */
+ if (srcSize <= ZSTD_PREDEF_THRESHOLD) { /* heuristic */
+ DEBUGLOG(5, "(srcSize <= ZSTD_PREDEF_THRESHOLD) => zop_predef");
optPtr->priceType = zop_predef;
+ }
assert(optPtr->symbolCosts != NULL);
- if (optPtr->symbolCosts->huf.repeatMode == HUF_repeat_valid) { /* huffman table presumed generated by dictionary */
+ if (optPtr->symbolCosts->huf.repeatMode == HUF_repeat_valid) {
+ /* huffman table presumed generated by dictionary */
optPtr->priceType = zop_dynamic;
assert(optPtr->litFreq != NULL);
/* dynamic statistics */
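+ /* cost, in 1/BITCOST_MULTIPLIER bits : extra bits of the llCode, plus an estimate of -log2(probability of llCode) derived from the collected frequencies */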
{ U32 const llCode = ZSTD_LLcode(litLength);
- return (LL_bits[llCode] * BITCOST_MULTIPLIER) + (optPtr->litLengthSumBasePrice - WEIGHT(optPtr->litLengthFreq[llCode], optLevel));
+ return (LL_bits[llCode] * BITCOST_MULTIPLIER)
+ + optPtr->litLengthSumBasePrice
+ - WEIGHT(optPtr->litLengthFreq[llCode], optLevel);
}
}
FORCE_INLINE_TEMPLATE U32
ZSTD_getMatchPrice(U32 const offset,
U32 const matchLength,
- const optState_t* const optPtr,
+ const optState_t* const optPtr,
int const optLevel)
{
U32 price;
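+ /* will hold the estimated cost of the offset code + match-length code, including their extra bits */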
U32* largerPtr = smallerPtr + 1;
U32 dummy32; /* to be nullified at the end */
U32 const windowLow = ms->window.lowLimit;
- U32 const matchLow = windowLow ? windowLow : 1;
U32 matchEndIdx = current+8+1;
size_t bestLength = 8;
U32 nbCompares = 1U << cParams->searchLog;
assert(ip <= iend-8); /* required for h calculation */
hashTable[h] = current; /* Update Hash Table */
- while (nbCompares-- && (matchIndex >= matchLow)) {
+ assert(windowLow > 0);   /* index 0 is never a valid match position : it marks an empty table slot */
+ while (nbCompares-- && (matchIndex >= windowLow)) {
U32* const nextPtr = bt + 2*(matchIndex & btMask);
size_t matchLength = MIN(commonLengthSmaller, commonLengthLarger); /* guaranteed minimum nb of common bytes */
assert(matchIndex < current);
const BYTE* const base = ms->window.base;
U32 const target = (U32)(ip - base);
U32 idx = ms->nextToUpdate;
- DEBUGLOG(5, "ZSTD_updateTree_internal, from %u to %u (dictMode:%u)",
+ DEBUGLOG(6, "ZSTD_updateTree_internal, from %u to %u (dictMode:%u)",
idx, target, dictMode);
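+ /* insert all positions from nextToUpdate up to (but excluding) target into the binary tree */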
while(idx < target)
ZSTD_compressBlock_opt_generic(ZSTD_matchState_t* ms,
seqStore_t* seqStore,
U32 rep[ZSTD_REP_NUM],
- const void* src, size_t srcSize,
- const int optLevel, const ZSTD_dictMode_e dictMode)
+ const void* src, size_t srcSize,
+ const int optLevel,
+ const ZSTD_dictMode_e dictMode)
{
optState_t* const optStatePtr = &ms->opt;
const BYTE* const istart = (const BYTE*)src;
ZSTD_optimal_t lastSequence;
/* init */
- DEBUGLOG(5, "ZSTD_compressBlock_opt_generic");
+ DEBUGLOG(5, "ZSTD_compressBlock_opt_generic: current=%u, prefix=%u, nextToUpdate=%u",
+ (U32)(ip - base), ms->window.dictLimit, ms->nextToUpdate);
assert(optLevel <= 2);
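+ /* keep the 3-byte match finder (hash3 table) in sync with the main match finder */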
ms->nextToUpdate3 = ms->nextToUpdate;
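+ /* (re)initialize the statistics used to price literals, lengths and offsets for this block */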
ZSTD_rescaleFreqs(optStatePtr, (const BYTE*)src, srcSize, optLevel);