/* NOTE(review): this region is a leaked unified-diff hunk ('-' = removed line,
 * '+' = added line, unprefixed = context). It is NOT compilable C as stored;
 * the markers must be resolved against the original file. Comments below
 * annotate the post-patch ('+' side) logic only. */
/* common lazy function, to be inlined */
FORCE_INLINE
size_t ZSTD_compressBlock_lazy_generic(ZSTD_CCtx* ctx,
- void* dst, size_t maxDstSize, const void* src, size_t srcSize,
- const U32 searchMethod, const U32 deep) /* searchMethod : 0 = hc; 1 = bt */
+ void* dst, size_t maxDstSize, const void* src, size_t srcSize,
+ const U32 searchMethod, const U32 deep) /* 0 : hc; 1 : bt */
{
seqStore_t* seqStorePtr = &(ctx->seqStore);
const BYTE* const istart = (const BYTE*)src;
/* NOTE(review): the diff elides context here — ip, anchor, iend, ilimit,
 * offset_1, offset_2, maxSearches and mls are declared in lines not shown. */
/* Match Loop */
while (ip < ilimit)
{
- /* repcode */
- if (MEM_read32(ip) == MEM_read32(ip - offset_2))
- {
- /* store sequence */
- size_t matchLength = ZSTD_count(ip+MINMATCH, ip+MINMATCH-offset_2, iend);
- size_t litLength = ip-anchor;
- size_t offset = offset_2;
- offset_2 = offset_1;
- offset_1 = offset;
- ZSTD_storeSeq(seqStorePtr, litLength, anchor, 0, matchLength);
- ip += matchLength+MINMATCH;
- anchor = ip;
- continue;
- }
-
- offset_2 = offset_1; /* failed once : necessarily offset_1 now */
-
- /* repcode at ip+1 */
/* New flow: the repcode check at ip (old '-' block above) is replaced by a
 * single repcode check at ip+1; matchLength/offset are shared with the
 * search path and the sequence is stored once, after both branches. */
+ size_t matchLength;
+ size_t offset;
+
+ /* priority to repcode at ip+1 */
if (MEM_read32(ip+1) == MEM_read32(ip+1 - offset_1))
{
- size_t matchLength = ZSTD_count(ip+1+MINMATCH, ip+1+MINMATCH-offset_1, iend);
- size_t litLength = ip+1-anchor;
- ZSTD_storeSeq(seqStorePtr, litLength, anchor, 0, matchLength);
- ip += 1+matchLength+MINMATCH;
- anchor = ip;
- continue;
/* ip++ folds the skipped byte into the literal run (litLength is computed
 * later as ip-anchor); offset = 0 flags a repeat-offset match. */
+ matchLength = ZSTD_count(ip+1+MINMATCH, ip+1+MINMATCH-offset_1, iend) + MINMATCH;
+ ip ++;
+ offset = 0;
}
-
- /* search */
+ else
{
- size_t offset=999999;
- size_t matchLength = ZSTD_HcFindBestMatch_selectMLS(ctx, ip, iend, &offset, maxSearches, mls);
+ /* search */
+ offset = 99999999; /* init to high value */
+ matchLength = ZSTD_HcFindBestMatch_selectMLS(ctx, ip, iend, &offset, maxSearches, mls);
if (matchLength < MINMATCH)
{
+ /* not found */
ip += ((ip-anchor) >> g_searchStrength) + 1; /* jump faster over incompressible sections */
continue;
}
- while ((ip>anchor) && (ip-offset>ctx->base) && (ip[-1] == ip[-1-offset])) { ip--; matchLength++; } /* catch up */
+ /* match found */
+ while ((ip>anchor) && (ip-offset>ctx->base) && (ip[-1] == ip[-1-offset])) { ip--; matchLength++; } /* catch up */
+ }
+
/* Common store: offset==0 means "repeat offset_1", so the offset history is
 * rotated only when a real (non-zero) offset was found. */
+ /* store found sequence */
+ {
+ size_t litLength = ip-anchor;
+ offset_2 = offset_1;
+ if (offset) offset_1 = offset;
+ ZSTD_storeSeq(seqStorePtr, litLength, anchor, offset, matchLength-MINMATCH);
+ ip += matchLength;
+ anchor = ip;
+ }
+
/* Greedily consume back-to-back repcode matches right after a stored
 * sequence (litLength is 0 here since anchor == ip). */
+ /* check immediate repcode */
+ while ( (ip <= ilimit)
+ && (MEM_read32(ip) == MEM_read32(ip - offset_2)) )
+ {
/* store sequence */
- {
- size_t litLength = ip-anchor;
- offset_1 = offset;
- ZSTD_storeSeq(seqStorePtr, litLength, anchor, offset_1, matchLength-MINMATCH);
- ip += matchLength;
- anchor = ip;
- }
+ size_t litLength = ip-anchor;
+ matchLength = ZSTD_count(ip+MINMATCH, ip+MINMATCH-offset_2, iend);
+ offset = offset_2;
+ offset_2 = offset_1;
+ offset_1 = offset;
+ ZSTD_storeSeq(seqStorePtr, litLength, anchor, 0, matchLength);
+ ip += matchLength+MINMATCH;
+ anchor = ip;
+ continue;
}
+
}
/* Last Literals */
/* ZSTD_compressBlock() :
 * Compress one block of `src` (srcSize bytes) into `dst` (capacity maxDstSize)
 * by dispatching to the block compressor selected for the current strategy.
 * @return : the value returned by the selected block compressor
 *           (presumably the compressed size, or 0/error per its contract —
 *            TODO confirm against the block-compressor implementations).
 * NOTE(review): resolved a leaked unified-diff hunk here; per its '-' marker,
 * the former `if (srcSize < MIN_CBLOCK_SIZE+3) return 0;` early-out was
 * removed by the patch, so tiny inputs are now the callee's/caller's
 * responsibility — confirm call sites handle this. */
size_t ZSTD_compressBlock(ZSTD_CCtx* ctx, void* dst, size_t maxDstSize, const void* src, size_t srcSize)
{
    /* Second argument selects an alternate compressor variant when part of the
     * window lies below the dictionary limit (ctx->lowLimit < ctx->dictLimit);
     * presumably the external-dictionary path — TODO confirm in
     * ZSTD_selectBlockCompressor(). */
    ZSTD_blockCompressor blockCompressor = ZSTD_selectBlockCompressor(ctx->params.strategy, ctx->lowLimit < ctx->dictLimit);
    return blockCompressor(ctx, dst, maxDstSize, src, srcSize);
}
/* NOTE(review): fragment of an enclosing function not visible here (op, cSize
 * and bt_compressed are declared in elided context); the '-'/'+' pair below is
 * a leaked unified-diff marker pair differing only in whitespace. */
/* Write the compressed size as a 24-bit big-endian value into the 3-byte
 * block header; assumes cSize fits in 24 bits minus the 2 tag bits of op[0]
 * — TODO confirm bound is enforced upstream. */
op[0] = (BYTE)(cSize>>16);
op[1] = (BYTE)(cSize>>8);
op[2] = (BYTE)cSize;
/* Tag the block type in the top 2 bits of the first header byte. */
- op[0] += (BYTE)(bt_compressed << 6); /* is a compressed block */
+ op[0] += (BYTE)(bt_compressed << 6); /* is a compressed block */
/* Account for the 3 header bytes in the reported compressed size. */
cSize += 3;
}