U32 hashLog3; /* dispatch table : larger == faster, more memory */
U32 loadedDictEnd;
U32 stage; /* 0: created; 1: init,dictLoad; 2:started */
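+ /* rep[] : repeat offsets active for the block being compressed ;
+    savedRep[] : candidates produced by block compression, promoted into rep[]
+    only once the block is confirmed (see "confirm repcodes" below) */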
+ U32 rep[ZSTD_REP_NUM];
+ U32 savedRep[ZSTD_REP_NUM];
U32 dictID;
ZSTD_parameters params;
void* workSpace;
zc->params = params;
zc->blockSize = blockSize;
zc->frameContentSize = frameContentSize;
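+ /* start from the standard repcode history, which the decoder initializes identically */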
+ { int i; for (i=0; i<ZSTD_REP_NUM; i++) zc->rep[i] = repStartValue[i]; }
if (params.cParams.strategy == ZSTD_btopt) {
zc->seqStore.litFreq = (U32*)(zc->seqStore.buffer);
size_t const maxCSize = srcSize - minGain;
if ((size_t)(op-ostart) >= maxCSize) return 0; }
+ /* confirm repcodes */
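+ /* reached only when the block is kept in compressed form : a block stored raw
+    (the return 0 path above) leaves rep[] untouched, matching the decoder */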
+ { int i; for (i=0; i<ZSTD_REP_NUM; i++) zc->rep[i] = zc->savedRep[i]; }
+
return op - ostart;
}
*/
MEM_STATIC void ZSTD_storeSeq(seqStore_t* seqStorePtr, size_t litLength, const BYTE* literals, size_t offsetCode, size_t matchCode)
{
-#if 0 /* for debug */
+#if 1 /* for debug */
static const BYTE* g_start = NULL;
if (g_start==NULL) g_start = literals;
const U32 pos = (U32)(literals - g_start);
- if ((pos > 2587900) && (pos < 2588050))
+ //if ((pos > 1) && (pos < 50000))
printf("Cpos %6u :%5u literals & match %3u bytes at distance %6u \n",
pos, (U32)litLength, (U32)matchCode+MINMATCH, (U32)offsetCode);
#endif
FORCE_INLINE
-void ZSTD_compressBlock_fast_generic(ZSTD_CCtx* zc,
+void ZSTD_compressBlock_fast_generic(ZSTD_CCtx* cctx,
const void* src, size_t srcSize,
const U32 mls)
{
- U32* const hashTable = zc->hashTable;
- const U32 hBits = zc->params.cParams.hashLog;
- seqStore_t* seqStorePtr = &(zc->seqStore);
- const BYTE* const base = zc->base;
+ U32* const hashTable = cctx->hashTable;
+ const U32 hBits = cctx->params.cParams.hashLog;
+ seqStore_t* seqStorePtr = &(cctx->seqStore);
+ const BYTE* const base = cctx->base;
const BYTE* const istart = (const BYTE*)src;
const BYTE* ip = istart;
const BYTE* anchor = istart;
- const U32 lowIndex = zc->dictLimit;
- const BYTE* const lowest = base + lowIndex;
+ const U32 lowestIndex = cctx->dictLimit;
+ const BYTE* const lowest = base + lowestIndex;
const BYTE* const iend = istart + srcSize;
const BYTE* const ilimit = iend - 8;
- size_t offset_2=REPCODE_STARTVALUE, offset_1=REPCODE_STARTVALUE;
+ size_t offset_1=cctx->rep[0], offset_2=cctx->rep[1];
/* init */
ZSTD_resetSeqStore(seqStorePtr);
- if (ip < lowest+REPCODE_STARTVALUE) ip = lowest+REPCODE_STARTVALUE;
+ ip += (ip==lowest);
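+ /* a carried-over repcode larger than the available history would point before
+    the window start ; zero it so it cannot produce a match in this block */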
+ { U32 const maxRep = (U32)(ip-lowest);
+ if (offset_1 > maxRep) offset_1 = 0;
+ if (offset_2 > maxRep) offset_2 = 0;
+ }
/* Main Search Loop */
- while (ip < ilimit) { /* < instead of <=, because repcode check at (ip+1) */
+ while (ip < ilimit) { /* < instead of <=, because repcode check at (ip+1) */
size_t mlCode;
size_t offset;
size_t const h = ZSTD_hashPtr(ip, hBits, mls);
ip++;
ZSTD_storeSeq(seqStorePtr, ip-anchor, anchor, 0, mlCode-MINMATCH);
} else {
- if ( (matchIndex <= lowIndex) ||
+ if ( (matchIndex <= lowestIndex) ||
(MEM_read32(match) != MEM_read32(ip)) ) {
ip += ((ip-anchor) >> g_searchStrength) + 1;
continue;
hashTable[ZSTD_hashPtr(ip-2, hBits, mls)] = (U32)(ip-2-base);
/* check immediate repcode */
while ( (ip <= ilimit)
- && ( (offset>0)
+ && ( (offset_2>0)
& (MEM_read32(ip) == MEM_read32(ip - offset_2)) )) {
/* store sequence */
size_t const rlCode = ZSTD_count(ip+EQUAL_READ32, ip+EQUAL_READ32-offset_2, iend) + EQUAL_READ32;
continue; /* faster when present ... (?) */
} } }
+ /* save reps for next block */
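+ /* a saved repcode of 0 would be invalid ; replace any offset zeroed by the
+    clamp above with an arbitrary non-zero distance */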
+ cctx->savedRep[0] = offset_1 ? offset_1 : (U32)(iend-base);
+ cctx->savedRep[1] = offset_2 ? offset_2 : (U32)(iend-base);
+
/* Last Literals */
{ size_t const lastLLSize = iend - anchor;
memcpy(seqStorePtr->lit, anchor, lastLLSize);
const BYTE* const dictEnd = dictBase + dictLimit;
const BYTE* const iend = istart + srcSize;
const BYTE* const ilimit = iend - 8;
-
- U32 offset_2=REPCODE_STARTVALUE, offset_1=REPCODE_STARTVALUE;
-
+ U32 offset_1=ctx->rep[0], offset_2=ctx->rep[1];
/* init */
ZSTD_resetSeqStore(seqStorePtr);
/* skip first position to avoid read overflow during repcode match check */
- hashTable[ZSTD_hashPtr(ip+0, hBits, mls)] = (U32)(ip-base+0);
- ip += REPCODE_STARTVALUE;
+ hashTable[ZSTD_hashPtr(ip, hBits, mls)] = (U32)(ip-base);
+ ip++;
/* Main Search Loop */
while (ip < ilimit) { /* < instead of <=, because (ip+1) */
U32 offset;
hashTable[h] = current; /* update hash table */
- if ( ((repIndex >= dictLimit) | ((repIndex <= dictLimit-4) & (repIndex>lowestIndex)))
- && (MEM_read32(repMatch) == MEM_read32(ip+1)) ) {
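+ /* the single unsigned compare below is equivalent to the two-part test it replaces :
+    it is true when repIndex <= dictLimit-4 or repIndex >= dictLimit, i.e. the 4 bytes
+    read at repMatch never straddle the extDict / prefix boundary */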
+ if ( (((U32)((dictLimit-1) - repIndex) >= 3) & (repIndex > lowestIndex)) /* intentional overflow */
+ && (MEM_read32(repMatch) == MEM_read32(ip+1)) ) {
const BYTE* repMatchEnd = repIndex < dictLimit ? dictEnd : iend;
mlCode = ZSTD_count_2segments(ip+1+EQUAL_READ32, repMatch+EQUAL_READ32, iend, repMatchEnd, lowPrefixPtr) + EQUAL_READ32;
ip++;
U32 const current2 = (U32)(ip-base);
U32 const repIndex2 = current2 - offset_2;
const BYTE* repMatch2 = repIndex2 < dictLimit ? dictBase + repIndex2 : base + repIndex2;
- if ( ( ((repIndex2>lowestIndex) & (repIndex2 <= dictLimit-4)) | (repIndex2 >= dictLimit) )
- && (MEM_read32(repMatch2) == MEM_read32(ip)) ) {
+ if ( (((U32)((dictLimit-1) - repIndex2) >= 3) & (repIndex2 > lowestIndex)) /* intentional overflow */
+ && (MEM_read32(repMatch2) == MEM_read32(ip)) ) {
const BYTE* const repEnd2 = repIndex2 < dictLimit ? dictEnd : iend;
size_t repLength2 = ZSTD_count_2segments(ip+EQUAL_READ32, repMatch2+EQUAL_READ32, iend, repEnd2, lowPrefixPtr) + EQUAL_READ32;
U32 tmpOffset = offset_2; offset_2 = offset_1; offset_1 = tmpOffset; /* swap offset_2 <=> offset_1 */
break;
} } }
+ /* save reps for next block */
+ ctx->savedRep[0] = offset_1; ctx->savedRep[1] = offset_2;
+
/* Last Literals */
{ size_t const lastLLSize = iend - anchor;
memcpy(seqStorePtr->lit, anchor, lastLLSize);
size_t* offsetPtr,
U32 maxNbAttempts, U32 matchLengthSearch);
searchMax_f const searchMax = searchMethod ? ZSTD_BtFindBestMatch_selectMLS : ZSTD_HcFindBestMatch_selectMLS;
-
- /* init */
U32 rep[ZSTD_REP_INIT];
- { U32 i ; for (i=0; i<ZSTD_REP_INIT; i++) rep[i]=REPCODE_STARTVALUE; }
+ /* init */
+ ip += (ip==base);
ctx->nextToUpdate3 = ctx->nextToUpdate;
ZSTD_resetSeqStore(seqStorePtr);
- if ((ip-base) < REPCODE_STARTVALUE) ip = base + REPCODE_STARTVALUE;
+ { U32 i;
+ U32 const maxRep = (U32)(ip-base);
+ for (i=0; i<ZSTD_REP_INIT; i++) {
+ rep[i]=ctx->rep[i];
+ if (rep[i]>maxRep) rep[i]=0;
+ } }
/* Match Loop */
while (ip < ilimit) {
continue; /* faster when present ... (?) */
} }
+ /* Save reps for next block */
+ { int i;
+ for (i=0; i<ZSTD_REP_NUM; i++) {
+ if (!rep[i]) rep[i] = (U32)(iend-base); /* in case some are still zero */
+ ctx->savedRep[i] = rep[i];
+ } }
+
/* Last Literals */
{ size_t const lastLLSize = iend - anchor;
memcpy(seqStorePtr->lit, anchor, lastLLSize);
/* init */
U32 rep[ZSTD_REP_INIT];
- { U32 i; for (i=0; i<ZSTD_REP_INIT; i++) rep[i]=REPCODE_STARTVALUE; }
+ { U32 i; for (i=0; i<ZSTD_REP_INIT; i++) rep[i]=ctx->rep[i]; }
ctx->nextToUpdate3 = ctx->nextToUpdate;
ZSTD_resetSeqStore(seqStorePtr);
- if ((ip - prefixStart) < REPCODE_STARTVALUE) ip += REPCODE_STARTVALUE;
+ ip += (ip == prefixStart);
/* Match Loop */
while (ip < ilimit) {
U32 current = (U32)(ip-base);
/* check repCode */
- {
- const U32 repIndex = (U32)(current+1 - rep[0]);
+ { const U32 repIndex = (U32)(current+1 - rep[0]);
const BYTE* const repBase = repIndex < dictLimit ? dictBase : base;
const BYTE* const repMatch = repBase + repIndex;
if (((U32)((dictLimit-1) - repIndex) >= 3) & (repIndex > lowestIndex)) /* intentional overflow */
break;
} }
+ /* Save reps for next block */
+ ctx->savedRep[0] = rep[0]; ctx->savedRep[1] = rep[1]; ctx->savedRep[2] = rep[2];
+
/* Last Literals */
{ size_t const lastLLSize = iend - anchor;
memcpy(seqStorePtr->lit, anchor, lastLLSize);
ZSTD_optimal_t* opt = seqStorePtr->priceTable;
ZSTD_match_t* matches = seqStorePtr->matchTable;
const BYTE* inr;
-
- /* init */
U32 offset, rep[ZSTD_REP_INIT];
- { U32 i; for (i=0; i<ZSTD_REP_INIT; i++) rep[i]=REPCODE_STARTVALUE; }
+ /* init */
ctx->nextToUpdate3 = ctx->nextToUpdate;
ZSTD_resetSeqStore(seqStorePtr);
ZSTD_rescaleFreqs(seqStorePtr);
- if ((ip-prefixStart) < REPCODE_STARTVALUE) ip = prefixStart + REPCODE_STARTVALUE;
+ ip += (ip==prefixStart);
+ { U32 i;
+ U32 const maxRep = (U32)(ip-prefixStart);
+ for (i=0; i<ZSTD_REP_INIT; i++) {
+ rep[i]=ctx->rep[i];
+ if (rep[i]>maxRep) rep[i]=0;
+ } }
ZSTD_LOG_BLOCK("%d: COMPBLOCK_OPT_GENERIC srcSz=%d maxSrch=%d mls=%d sufLen=%d\n", (int)(ip-base), (int)srcSize, maxSearches, mls, sufficient_len);
anchor = ip = ip + mlen;
} } /* for (cur=0; cur < last_pos; ) */
- { /* Last Literals */
- size_t lastLLSize = iend - anchor;
+ /* Save reps for next block */
+ { int i;
+ for (i=0; i<ZSTD_REP_NUM; i++) {
+ if (!rep[i]) rep[i] = (U32)(iend-base); /* in case some are still zero */
+ ctx->savedRep[i] = rep[i];
+ } }
+
+ /* Last Literals */
+ { size_t lastLLSize = iend - anchor;
ZSTD_LOG_ENCODE("%d: lastLLSize literals=%u\n", (int)(ip-base), (U32)lastLLSize);
memcpy(seqStorePtr->lit, anchor, lastLLSize);
seqStorePtr->lit += lastLLSize;
/* init */
U32 offset, rep[ZSTD_REP_INIT];
- { U32 i; for (i=0; i<ZSTD_REP_INIT; i++) rep[i]=REPCODE_STARTVALUE; }
+ { U32 i; for (i=0; i<ZSTD_REP_INIT; i++) rep[i]=ctx->rep[i]; }
ctx->nextToUpdate3 = ctx->nextToUpdate;
ZSTD_resetSeqStore(seqStorePtr);
ZSTD_rescaleFreqs(seqStorePtr);
- if ((ip - prefixStart) < REPCODE_STARTVALUE) ip += REPCODE_STARTVALUE;
+ ip += (ip==prefixStart);
ZSTD_LOG_BLOCK("%d: COMPBLOCK_OPT_EXTDICT srcSz=%d maxSrch=%d mls=%d sufLen=%d\n", (int)(ip-base), (int)srcSize, maxSearches, mls, sufficient_len);
anchor = ip = ip + mlen;
} } /* for (cur=0; cur < last_pos; ) */
- { /* Last Literals */
- size_t lastLLSize = iend - anchor;
+ /* Save reps for next block */
+ ctx->savedRep[0] = rep[0]; ctx->savedRep[1] = rep[1]; ctx->savedRep[2] = rep[2];
+
+ /* Last Literals */
+ { size_t lastLLSize = iend - anchor;
ZSTD_LOG_ENCODE("%d: lastLLSize literals=%u\n", (int)(ip-base), (U32)(lastLLSize));
memcpy(seqStorePtr->lit, anchor, lastLLSize);
seqStorePtr->lit += lastLLSize;
const void* vBase;
const void* dictEnd;
size_t expected;
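+ /* repeat offsets carried across blocks, mirroring the compressor's rep[] history */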
+ U32 rep[ZSTD_REP_NUM];
ZSTD_frameParams fParams;
blockType_t bType; /* used in ZSTD_decompressContinue(), to transfer blockType between header decoding and block decoding stages */
ZSTD_dStage stage;
dctx->hufTable[0] = (HUF_DTable)((HufLog)*0x1000001);
dctx->litEntropy = dctx->fseEntropy = 0;
dctx->dictID = 0;
+ { int i; for (i=0; i<ZSTD_REP_NUM; i++) dctx->rep[i] = repStartValue[i]; }
return 0;
}
{ size_t const seqHSize = ZSTD_decodeSeqHeaders(&nbSeq, DTableLL, DTableML, DTableOffb, dctx->fseEntropy, ip, seqSize);
if (ZSTD_isError(seqHSize)) return seqHSize;
ip += seqHSize;
- dctx->fseEntropy = 1;
}
/* Regen sequences */
if (nbSeq) {
seqState_t seqState;
- { U32 i; for (i=0; i<ZSTD_REP_INIT; i++) seqState.prevOffset[i] = REPCODE_STARTVALUE; }
+ dctx->fseEntropy = 1;
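+ /* seed the sequence decoder with the repcodes inherited from the previous block */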
+ { U32 i; for (i=0; i<ZSTD_REP_INIT; i++) seqState.prevOffset[i] = dctx->rep[i]; }
{ size_t const errorCode = BIT_initDStream(&(seqState.DStream), ip, iend-ip);
if (ERR_isError(errorCode)) return ERROR(corruption_detected); }
FSE_initDState(&(seqState.stateLL), &(seqState.DStream), DTableLL);
for ( ; (BIT_reloadDStream(&(seqState.DStream)) <= BIT_DStream_completed) && nbSeq ; ) {
nbSeq--;
- { seq_t const sequence = ZSTD_decodeSequence(&seqState);
+ { seq_t const sequence = ZSTD_decodeSequence(&seqState);
size_t const oneSeqSize = ZSTD_execSequence(op, oend, sequence, &litPtr, litLimit_8, base, vBase, dictEnd);
if (ZSTD_isError(oneSeqSize)) return oneSeqSize;
op += oneSeqSize;
/* check if reached exact end */
if (nbSeq) return ERROR(corruption_detected);
+ /* save reps for next block */
+ { U32 i; for (i=0; i<ZSTD_REP_INIT; i++) dctx->rep[i] = seqState.prevOffset[i]; }
}
/* last literal segment */