}
+/* *******************************
+* Common parser - greedy strategy : takes the first acceptable match (no lazy evaluation)
+*********************************/
+FORCE_INLINE
+void ZSTD_compressBlock_greedy_generic(ZSTD_CCtx* ctx,
+ const void* src, size_t srcSize)
+{
+ seqStore_t* seqStorePtr = &(ctx->seqStore);    /* output: sequences + literals */
+ const BYTE* const istart = (const BYTE*)src;
+ const BYTE* ip = istart;
+ const BYTE* anchor = istart;    /* start of not-yet-stored literals */
+ const BYTE* const iend = istart + srcSize;
+ const BYTE* const ilimit = iend - 8;    /* margin so 32-bit reads near the end stay in bounds */
+ const BYTE* const base = ctx->base + ctx->dictLimit;    /* start of current prefix */
+
+ const U32 maxSearches = 1 << ctx->params.cParams.searchLog;
+ const U32 mls = ctx->params.cParams.searchLength;
+
+ /* init : repcode history starts with the default value */
+ U32 rep[ZSTD_REP_INIT];
+ for (U32 i=0; i<ZSTD_REP_INIT; i++)
+ rep[i]=REPCODE_STARTVALUE;
+
+ ZSTD_resetSeqStore(seqStorePtr);
+ if ((ip-base) < REPCODE_STARTVALUE) ip = base + REPCODE_STARTVALUE;    /* ensure ip - rep[0] points at readable data */
+
+ /* Match Loop */
+ while (ip < ilimit) {
+ size_t matchLength=0;
+ size_t offset=0;    /* 0 => repcode; otherwise real offset + ZSTD_REP_MOVE */
+ const BYTE* start=ip+1;    /* repcode is tested one position ahead of ip */
+
+ /* check repCode : cheaper than a full search, so tried first */
+ if (MEM_read32(start) == MEM_read32(start - rep[0])) {
+ /* repcode : greedy takes it immediately */
+ matchLength = ZSTD_count(start+MINMATCH, start+MINMATCH-rep[0], iend) + MINMATCH;
+ goto _storeSequence;
+ }
+
+ /* first search (depth 0) : hash-chain match finder */
+ { size_t offsetFound = 99999999;    /* sentinel, overwritten when a match is found */
+ size_t const ml2 = ZSTD_HcFindBestMatch_selectMLS(ctx, ip, iend, &offsetFound, maxSearches, mls);
+ if (ml2 > matchLength)
+ matchLength = ml2, start = ip, offset=offsetFound + ZSTD_REP_MOVE;
+ }
+
+ if (matchLength < MINMATCH) {
+ ip += ((ip-anchor) >> g_searchStrength) + 1; /* jump faster over incompressible sections */
+ continue;
+ }
+
+ /* catch up : extend the match backward while preceding bytes also match */
+ while ((start>anchor) && (start>base+offset-ZSTD_REP_MOVE) && (start[-1] == start[-1-offset+ZSTD_REP_MOVE])) /* only search for offset within prefix */
+ { start--; matchLength++; }
+ rep[1] = rep[0]; rep[0] = offset - ZSTD_REP_MOVE;    /* update repcode history */
+
+_storeSequence:
+ /* store sequence */
+ { size_t const litLength = start - anchor;
+ ZSTD_storeSeq(seqStorePtr, litLength, anchor, offset, matchLength-MINMATCH);
+ anchor = ip = start + matchLength;
+ }
+
+ /* check immediate repcode (uses the previous offset, rep[1]) */
+ while ( (ip <= ilimit)
+ && (MEM_read32(ip) == MEM_read32(ip - rep[1])) ) {
+ /* store sequence */
+ matchLength = ZSTD_count(ip+MINMATCH, ip+MINMATCH-rep[1], iend);    /* length beyond MINMATCH */
+ offset = rep[1]; rep[1] = rep[0]; rep[0] = offset; /* swap offset history */
+ ZSTD_storeSeq(seqStorePtr, 0, anchor, 0, matchLength);
+ ip += matchLength+MINMATCH;
+ anchor = ip;
+ continue; /* NOTE(review): 'continue' as last statement of the loop is a semantic no-op; original author found it faster (?) */
+ } }
+
+ /* Last Literals : flush remaining bytes as raw literals */
+ { size_t const lastLLSize = iend - anchor;
+ memcpy(seqStorePtr->lit, anchor, lastLLSize);
+ seqStorePtr->lit += lastLLSize;
+ ZSTD_statsUpdatePrices(&seqStorePtr->stats, lastLLSize, anchor, 0, 0);
+ }
+}
+
+
+FORCE_INLINE
+void ZSTD_compressBlock_greedy_extDict_generic(ZSTD_CCtx* ctx,
+ const void* src, size_t srcSize)
+{
+ seqStore_t* seqStorePtr = &(ctx->seqStore);    /* output: sequences + literals */
+ const BYTE* const istart = (const BYTE*)src;
+ const BYTE* ip = istart;
+ const BYTE* anchor = istart;    /* start of not-yet-stored literals */
+ const BYTE* const iend = istart + srcSize;
+ const BYTE* const ilimit = iend - 8;    /* margin so 32-bit reads near the end stay in bounds */
+ const BYTE* const base = ctx->base;
+ const U32 dictLimit = ctx->dictLimit;    /* indexes below dictLimit live in the dictBase segment */
+ const BYTE* const prefixStart = base + dictLimit;
+ const BYTE* const dictBase = ctx->dictBase;
+ const BYTE* const dictEnd = dictBase + dictLimit;
+ const BYTE* const dictStart = dictBase + ctx->lowLimit;
+
+ const U32 maxSearches = 1 << ctx->params.cParams.searchLog;
+ const U32 mls = ctx->params.cParams.searchLength;
+
+ /* init : repcode history starts with the default value */
+ U32 rep[ZSTD_REP_INIT];
+ for (U32 i=0; i<ZSTD_REP_INIT; i++)
+ rep[i]=REPCODE_STARTVALUE;
+
+ ZSTD_resetSeqStore(seqStorePtr);
+ if ((ip - prefixStart) < REPCODE_STARTVALUE) ip += REPCODE_STARTVALUE;    /* NOTE(review): advances relative to ip, unlike the prefix variant's 'ip = base + REPCODE_STARTVALUE' - confirm intended */
+
+ /* Match Loop */
+ while (ip < ilimit) {
+ size_t matchLength=0;
+ size_t offset=0;    /* 0 => repcode; otherwise real offset + ZSTD_REP_MOVE */
+ const BYTE* start=ip+1;    /* repcode is tested one position ahead of ip */
+ U32 current = (U32)(start-base);
+
+ /* check repCode : cheaper than a full search, so tried first */
+ {
+ const U32 repIndex = (U32)(current - rep[0]);
+ const BYTE* const repBase = repIndex < dictLimit ? dictBase : base;    /* select segment holding the candidate */
+ const BYTE* const repMatch = repBase + repIndex;
+ if ((U32)((dictLimit-1) - repIndex) >= 3) /* intentional underflow : rejects repIndex just below dictLimit, where a 4-byte read would run past dictEnd; in-prefix indexes pass via wraparound */
+ if (MEM_read32(start) == MEM_read32(repMatch)) {
+ /* repcode detected : greedy takes it immediately */
+ const BYTE* const repEnd = repIndex < dictLimit ? dictEnd : iend;
+ matchLength = ZSTD_count_2segments(start+MINMATCH, repMatch+MINMATCH, iend, repEnd, prefixStart) + MINMATCH;
+ goto _storeSequence;
+ } }
+
+ /* first search (depth 0) : hash-chain match finder, extDict variant */
+ { size_t offsetFound = 99999999;    /* sentinel, overwritten when a match is found */
+ size_t const ml2 = ZSTD_HcFindBestMatch_extDict_selectMLS(ctx, ip, iend, &offsetFound, maxSearches, mls);
+ if (ml2 > matchLength)
+ matchLength = ml2, start = ip, offset=offsetFound + ZSTD_REP_MOVE;
+ }
+
+ if (matchLength < MINMATCH) {
+ ip += ((ip-anchor) >> g_searchStrength) + 1; /* jump faster over incompressible sections */
+ continue;
+ }
+
+ /* catch up : extend backward; only reached for search matches (the repcode path jumps straight to _storeSequence) */
+ if (offset >= ZSTD_REP_NUM) {
+ U32 matchIndex = (U32)((start-base) - (offset - ZSTD_REP_MOVE));
+ const BYTE* match = (matchIndex < dictLimit) ? dictBase + matchIndex : base + matchIndex;
+ const BYTE* const mStart = (matchIndex < dictLimit) ? dictStart : prefixStart;
+ while ((start>anchor) && (match>mStart) && (start[-1] == match[-1])) { start--; match--; matchLength++; }
+ rep[1] = rep[0]; rep[0] = offset - ZSTD_REP_MOVE;    /* update repcode history */
+ }
+
+_storeSequence:
+ /* store sequence */
+ { size_t const litLength = start - anchor;
+ ZSTD_storeSeq(seqStorePtr, litLength, anchor, offset, matchLength-MINMATCH);
+ anchor = ip = start + matchLength;
+ }
+
+ /* check immediate repcode (uses the previous offset, rep[1]) */
+ while (ip <= ilimit) {
+ const U32 repIndex = (U32)((ip-base) - rep[1]);
+ const BYTE* const repBase = repIndex < dictLimit ? dictBase : base;
+ const BYTE* const repMatch = repBase + repIndex;
+ if ((U32)((dictLimit-1) - repIndex) >= 3) /* intentional underflow : same segment-boundary guard as above */
+ if (MEM_read32(ip) == MEM_read32(repMatch)) {
+ /* repcode detected : store it with litLength 0 */
+ const BYTE* const repEnd = repIndex < dictLimit ? dictEnd : iend;
+ matchLength = ZSTD_count_2segments(ip+MINMATCH, repMatch+MINMATCH, iend, repEnd, prefixStart) + MINMATCH;
+ offset = rep[1]; rep[1] = rep[0]; rep[0] = offset; /* swap offset history */
+ ZSTD_storeSeq(seqStorePtr, 0, anchor, 0, matchLength-MINMATCH);
+ ip += matchLength;
+ anchor = ip;
+ continue; /* loop again; needed here to skip the break below */
+ }
+ break;
+ } }
+
+ /* Last Literals : flush remaining bytes as raw literals */
+ { size_t const lastLLSize = iend - anchor;
+ memcpy(seqStorePtr->lit, anchor, lastLLSize);    /* NOTE(review): no ZSTD_statsUpdatePrices call here, unlike the prefix variant - confirm intentional */
+ seqStorePtr->lit += lastLLSize;
+ }
+}
+
+
+
/* *******************************
* Common parser - lazy strategy
*********************************/
const U32 maxSearches = 1 << ctx->params.cParams.searchLog;
const U32 mls = ctx->params.cParams.searchLength;
- const U32 repStart = (ctx->params.cParams.strategy == ZSTD_greedy) ? 0 : 0;
- const U32 repNum = (ctx->params.cParams.strategy == ZSTD_greedy) ? 2 : ZSTD_REP_NUM;
typedef size_t (*searchMax_f)(ZSTD_CCtx* zc, const BYTE* ip, const BYTE* iLimit,
size_t* offsetPtr,
const BYTE* start=ip;
/* check repCode */
- for (U32 i=repStart; i<repNum; i++)
+ for (U32 i=0; i<ZSTD_REP_NUM; i++)
if (MEM_read32(ip) == MEM_read32(ip - rep[i])) {
/* repcode : we take it */
if (matchLength==0) {
matchLength = ZSTD_count(ip+MINMATCH, ip+MINMATCH-rep[i], iend) + MINMATCH;
offset = i;
- if (depth==0) goto _storeSequence;
} else {
size_t mlRep = ZSTD_count(ip+MINMATCH, ip+MINMATCH-rep[i], iend) + MINMATCH;
int gain2 = (int)(mlRep * 3 /*- ZSTD_highbit((U32)i+1)*/ + (i==1));
if (depth>=1)
while (ip<ilimit) {
ip ++;
- for (U32 i=repStart; i<repNum; i++)
+ for (U32 i=0; i<ZSTD_REP_NUM; i++)
if (MEM_read32(ip) == MEM_read32(ip - rep[i])) {
size_t const mlRep = ZSTD_count(ip+MINMATCH, ip+MINMATCH-rep[i], iend) + MINMATCH;
int const gain2 = (int)(mlRep * 3);
/* let's find an even better one */
if ((depth==2) && (ip<ilimit)) {
ip ++;
- for (U32 i=repStart; i<repNum; i++)
+ for (U32 i=0; i<ZSTD_REP_NUM; i++)
if (MEM_read32(ip) == MEM_read32(ip - rep[i])) {
size_t const ml2 = ZSTD_count(ip+MINMATCH, ip+MINMATCH-rep[i], iend) + MINMATCH;
int const gain2 = (int)(ml2 * 4);
}
/* store sequence */
-_storeSequence:
{
if (offset >= ZSTD_REP_NUM) {
rep[2] = rep[1];
static void ZSTD_compressBlock_greedy(ZSTD_CCtx* ctx, const void* src, size_t srcSize)
{
- ZSTD_compressBlock_lazy_generic(ctx, src, srcSize, 0, 0);
+ ZSTD_compressBlock_greedy_generic(ctx, src, srcSize);
}
const U32 maxSearches = 1 << ctx->params.cParams.searchLog;
const U32 mls = ctx->params.cParams.searchLength;
- const U32 repNum = (ctx->params.cParams.strategy == ZSTD_greedy) ? 1 : ZSTD_REP_NUM;
typedef size_t (*searchMax_f)(ZSTD_CCtx* zc, const BYTE* ip, const BYTE* iLimit,
size_t* offsetPtr,
U32 current = (U32)(ip-base);
/* check repCode */
- for (U32 i=0; i<repNum; i++) {
+ for (U32 i=0; i<ZSTD_REP_NUM; i++) {
const U32 repIndex = (U32)(current - rep[i]);
const BYTE* const repBase = repIndex < dictLimit ? dictBase : base;
const BYTE* const repMatch = repBase + repIndex;
if (matchLength==0) {
offset = i;
matchLength = ZSTD_count_2segments(ip+MINMATCH, repMatch+MINMATCH, iend, repEnd, prefixStart) + MINMATCH;
- if (depth==0) goto _storeSequence;
} else {
size_t mlRep = ZSTD_count_2segments(ip+MINMATCH, repMatch+MINMATCH, iend, repEnd, prefixStart) + MINMATCH;
int gain2 = (int)(mlRep * 3 /*- ZSTD_highbit((U32)i+1)*/ + (i==1));
ip ++;
current++;
/* check repCode */
- for (U32 i=0; i<repNum; i++) {
+ for (U32 i=0; i<ZSTD_REP_NUM; i++) {
const U32 repIndex = (U32)(current - rep[i]);
const BYTE* const repBase = repIndex < dictLimit ? dictBase : base;
const BYTE* const repMatch = repBase + repIndex;
ip ++;
current++;
/* check repCode */
- for (U32 i=0; i<repNum; i++) {
+ for (U32 i=0; i<ZSTD_REP_NUM; i++) {
const U32 repIndex = (U32)(current - rep[i]);
const BYTE* const repBase = repIndex < dictLimit ? dictBase : base;
const BYTE* const repMatch = repBase + repIndex;
}
/* store sequence */
-_storeSequence:
{
if (offset >= ZSTD_REP_NUM) {
rep[2] = rep[1];
void ZSTD_compressBlock_greedy_extDict(ZSTD_CCtx* ctx, const void* src, size_t srcSize)
{
- ZSTD_compressBlock_lazy_extDict_generic(ctx, src, srcSize, 0, 0);
+ ZSTD_compressBlock_greedy_extDict_generic(ctx, src, srcSize);
}
static void ZSTD_compressBlock_lazy_extDict(ZSTD_CCtx* ctx, const void* src, size_t srcSize)