{
U32 const current = (U32)((BYTE const*)srcEnd - window->base);
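+ /* current : index of the end of the input, counted from the start of the window */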
U32 loadedDictEnd = loadedDictEndPtr != NULL ? *loadedDictEndPtr : 0;
+ DEBUGLOG(5, "ZSTD_window_enforceMaxDist: current=%u, maxDist=%u", current, maxDist);
if (current > maxDist + loadedDictEnd) {
U32 const newLowLimit = current - maxDist;
if (window->lowLimit < newLowLimit) window->lowLimit = newLowLimit;
if (window->dictLimit < window->lowLimit) {
- DEBUGLOG(5, "Update dictLimit from %u to %u", window->dictLimit,
- window->lowLimit);
+ DEBUGLOG(5, "Update dictLimit to match lowLimit, from %u to %u",
+ window->dictLimit, window->lowLimit);
window->dictLimit = window->lowLimit;
}
if (loadedDictEndPtr) *loadedDictEndPtr = 0;   /* the loaded dictionary is now entirely out of range : forget it */
}
}
{
BYTE const* const ip = (BYTE const*)src;
U32 contiguous = 1;
+ DEBUGLOG(5, "ZSTD_window_update");
/* Check if blocks follow each other */
if (src != window->nextSrc) {
/* not contiguous */
size_t const distanceFromBase = (size_t)(window->nextSrc - window->base);
- DEBUGLOG(5, "Non contiguous blocks, new segment starts at %u",
- window->dictLimit);
+ DEBUGLOG(5, "Non contiguous blocks, new segment starts at %u", window->dictLimit);
window->lowLimit = window->dictLimit;
assert(distanceFromBase == (size_t)(U32)distanceFromBase); /* should never overflow */
window->dictLimit = (U32)distanceFromBase;
window->dictBase = window->base;
window->base = ip - distanceFromBase;
/* rebase so that the indexes of the new segment continue right after those of the previous segment */
if (window->dictLimit - window->lowLimit < HASH_READ_SIZE) window->lowLimit = window->dictLimit;   /* too small extDict */
contiguous = 0;
}
window->nextSrc = ip + srcSize;
/* if input and dictionary overlap : reduce dictionary (area presumed modified by input) */
if ( (ip + srcSize > window->dictBase + window->lowLimit) & (ip < window->dictBase + window->dictLimit) ) {
ptrdiff_t const highInputIdx = (ip + srcSize) - window->dictBase;
U32 const lowLimitMax = (highInputIdx > (ptrdiff_t)window->dictLimit) ? window->dictLimit : (U32)highInputIdx;
window->lowLimit = lowLimitMax;
+ DEBUGLOG(5, "Overlapping extDict and input : new lowLimit = %u", window->lowLimit);
}
return contiguous;
}
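+ /* fill the tables used by the fast strategies up to the given position, so the block compressor starts from an up-to-date index ; the slower strategies insert positions as they search */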
switch(cParams->strategy)
{
case ZSTD_fast:
ZSTD_fillHashTable(ms, cParams, iend, ZSTD_dtlm_fast);
- ms->nextToUpdate = (U32)(iend - ms->window.base);
break;
case ZSTD_dfast:
ZSTD_fillDoubleHashTable(ms, cParams, iend, ZSTD_dtlm_fast);
- ms->nextToUpdate = (U32)(iend - ms->window.base);
break;
case ZSTD_greedy:
case ZSTD_lazy:
case ZSTD_lazy2:
case ZSTD_btlazy2:
case ZSTD_btopt:
case ZSTD_btultra:
break;
default:
assert(0);  /* not possible : not a valid strategy id */
}
return 0;
}
unsigned const minMatch = cParams->searchLength;
ZSTD_blockCompressor const blockCompressor =
ZSTD_selectBlockCompressor(cParams->strategy, extDict);
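+ /* the selected block compressor is applied to the literal segments located between the long-distance matches */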
- BYTE const* const base = ms->window.base;
/* Input bounds */
BYTE const* const istart = (BYTE const*)src;
BYTE const* const iend = istart + srcSize;
/* Input positions */
BYTE const* ip = istart;
+ DEBUGLOG(5, "ZSTD_ldm_blockCompress: srcSize=%zu", srcSize);
assert(rawSeqStore->pos <= rawSeqStore->size);
assert(rawSeqStore->size <= rawSeqStore->capacity);
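+ /* rawSeqStore holds the pre-computed long-distance sequences ; they are consumed in order, and the literals in between are handed to the block compressor */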
/* Loop through each sequence and apply the block compressor to the lits */
while (rawSeqStore->pos < rawSeqStore->size && ip < iend) {
/* maybeSplitSequence updates rawSeqStore->pos */
rawSeq const sequence = maybeSplitSequence(rawSeqStore, (U32)(iend - ip), minMatch);
int i;
/* End signal */
if (sequence.offset == 0)
break;
/* Fill tables for block compressor */
ZSTD_ldm_limitTableUpdate(ms, ip);
ZSTD_ldm_fillFastTables(ms, cParams, ip);
/* Run the block compressor */
+ DEBUGLOG(5, "calling block compressor on segment of size %u", sequence.litLength);
{
size_t const newLitLength =
blockCompressor(ms, seqStore, rep, cParams, ip,
sequence.litLength);
ip += sequence.litLength;
- ms->nextToUpdate = (U32)(ip - base);
/* Update the repcodes */
for (i = ZSTD_REP_NUM - 1; i > 0; i--)
rep[i] = rep[i-1];
rep[0] = sequence.offset;
/* Store the sequence */
ZSTD_storeSeq(seqStore, newLitLength, ip - newLitLength, sequence.offset + ZSTD_REP_MOVE, sequence.matchLength - MINMATCH);
ip += sequence.matchLength;
}
}
/* Fill the tables for the block compressor */
ZSTD_ldm_limitTableUpdate(ms, ip);
ZSTD_ldm_fillFastTables(ms, cParams, ip);
/* Compress the last literals */
- {
- size_t const lastLiterals = blockCompressor(ms, seqStore, rep, cParams,
+ { size_t const lastLiterals = blockCompressor(ms, seqStore, rep, cParams,
ip, iend - ip);
- ms->nextToUpdate = (U32)(iend - base);
return lastLiterals;
}
}
const BYTE* const base = ms->window.base;
U32 const target = (U32)(ip - base);
U32 idx = ms->nextToUpdate;
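+ /* insert all positions from nextToUpdate up to ip (target) into the binary tree */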
- DEBUGLOG(8, "ZSTD_updateTree_internal, from %u to %u (extDict:%u)",
+ DEBUGLOG(5, "ZSTD_updateTree_internal, from %u to %u (extDict:%u)",
idx, target, extDict);
while(idx < target)
U32 nbCompares = 1U << cParams->searchLog;
size_t bestLength = lengthToBeat-1;
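+ /* only matches strictly longer than bestLength, hence of length >= lengthToBeat, will be recorded */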
- DEBUGLOG(8, "ZSTD_insertBtAndGetAllMatches");
+ DEBUGLOG(8, "ZSTD_insertBtAndGetAllMatches: current=%u", current);
/* check repCode */
{ U32 const lastR = ZSTD_REP_NUM + ll0;
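+ /* when the previous sequence ends with no literal (ll0), repcode 1 cannot be reused as-is, so the set of candidate repcodes is shifted */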
size_t ZSTD_compressBlock_btultra(
ZSTD_matchState_t* ms, seqStore_t* seqStore, U32 rep[ZSTD_REP_NUM],
const ZSTD_compressionParameters* cParams, const void* src, size_t srcSize)
{
- if (ms->opt.litLengthSum==0) { /* first block */
+ DEBUGLOG(5, "ZSTD_compressBlock_btultra (srcSize=%zu)", srcSize);
+ assert(srcSize <= ZSTD_BLOCKSIZE_MAX);
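+ /* 2-pass strategy : the first block is compressed once just to collect statistics into ms->opt, then discarded and compressed again with properly initialized costs */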
+ if ( (ms->opt.litLengthSum==0) /* first block */
+ && (seqStore->sequences == seqStore->sequencesStart) /* no ldm */
+ && (ms->window.dictLimit == ms->window.lowLimit) ) { /* no dictionary */
U32 tmpRep[ZSTD_REP_NUM];
+ DEBUGLOG(5, "ZSTD_compressBlock_btultra: first block: collecting statistics");
assert(ms->nextToUpdate >= ms->window.dictLimit
&& ms->nextToUpdate <= ms->window.dictLimit + 1);
memcpy(tmpRep, rep, sizeof(tmpRep));
ZSTD_compressBlock_opt_generic(ms, seqStore, tmpRep, cParams, src, srcSize, 2 /*optLevel*/, 0 /*extDict*/); /* generate stats into ms->opt*/
ZSTD_resetSeqStore(seqStore);
- ZSTD_window_update(&ms->window, src, srcSize); /* invalidate first scan from history, since it overlaps perfectly */
+ /* invalidate first scan from history */
+ ms->window.base -= srcSize;
+ ms->window.dictLimit += (U32)srcSize;
+ ms->window.lowLimit = ms->window.dictLimit;
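+ /* shifting base backward while raising dictLimit keeps indexes consistent, but puts the first-pass positions outside the valid window, so the second pass starts from a clean history */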
ms->nextToUpdate = ms->window.dictLimit;
+ ms->nextToUpdate3 = ms->window.dictLimit;
+ /* reinforce the weight of the collected statistics */
ZSTD_upscaleStats(&ms->opt);
}
+ DEBUGLOG(5, "base=%p, src=%p, src-base=%zi", ms->window.base, src, (const BYTE*)src - (const BYTE*)ms->window.base);
return ZSTD_compressBlock_opt_generic(ms, seqStore, rep, cParams, src, srcSize, 2 /*optLevel*/, 0 /*extDict*/);
}