void* workSpace;        /* scratch buffer (tables, sequence storage); owned by this context, released in ZSTD_freeCCtx() */
size_t workSpaceSize;   /* current capacity of workSpace, in bytes */
size_t blockSize;       /* presumably the per-block compression size for this session — TODO confirm against caller */
+ ZSTD_allocFunction customAlloc;   /* allocator used for workSpace and temporaries (default: malloc) */
+ ZSTD_freeFunction customFree;     /* matching deallocator; must pair with customAlloc (default: free) */
seqStore_t seqStore; /* sequences storage ptrs */
U32* hashTable;
ZSTD_CCtx* ZSTD_createCCtx(void)
{
- return (ZSTD_CCtx*) calloc(1, sizeof(ZSTD_CCtx));
+ ZSTD_CCtx* ctx = (ZSTD_CCtx*) calloc(1, sizeof(ZSTD_CCtx));
+ if (!ctx) return NULL;
+
+ ctx->customAlloc = malloc;
+ ctx->customFree = free;
+ return ctx;
+}
+
+ZSTD_CCtx* ZSTD_createCCtx_advanced(ZSTD_customMem customMem)
+{
+ if (!customMem.customAlloc || !customMem.customFree)
+ return ZSTD_createCCtx();
+
+ ZSTD_CCtx* ctx = (ZSTD_CCtx*) customMem.customAlloc(sizeof(ZSTD_CCtx));
+ if (!ctx) return NULL;
+
+ memset(ctx, 0, sizeof(ZSTD_CCtx));
+ ctx->customAlloc = customMem.customAlloc;
+ ctx->customFree = customMem.customFree;
+ return ctx;
}
/*! ZSTD_freeCCtx() :
 *  Releases a context and its workspace through the context's own free
 *  function, so memory obtained from a custom allocator is never handed to free().
 *  Accepts NULL as a no-op, matching free() semantics (the previous code
 *  dereferenced cctx unconditionally and crashed on NULL).
 *  @return 0 (reserved as a potential error code in the future) */
size_t ZSTD_freeCCtx(ZSTD_CCtx* cctx)
{
    if (cctx == NULL) return 0;   /* free(NULL)-style no-op */
    if (cctx->workSpace != NULL)
        cctx->customFree(cctx->workSpace);   /* guarded: a custom free function may not accept NULL */
    cctx->customFree(cctx);
    return 0;   /* reserved as a potential error code in the future */
}
/* Lazily grow the context workspace to fit the requested parameters, going
 * through the context's own alloc/free pair so buffers from a custom
 * allocator are never released with plain free().
 * NOTE(review): if customAlloc fails, workSpace is NULL but workSpaceSize
 * still holds the old capacity; a later call needing less space would skip
 * this branch and use the NULL workSpace — consider zeroing workSpaceSize
 * before reallocating. TODO confirm against the full function. */
size_t const neededSpace = tableSpace + (256*sizeof(U32)) /* huffTable */ + tokenSpace
+ ((params.cParams.strategy == ZSTD_btopt) ? optSpace : 0);  /* optimal-parser tables needed only for btopt */
if (zc->workSpaceSize < neededSpace) {
- free(zc->workSpace);
- zc->workSpace = malloc(neededSpace);
+ zc->customFree(zc->workSpace);
+ zc->workSpace = zc->customAlloc(neededSpace);
if (zc->workSpace == NULL) return ERROR(memory_allocation);
zc->workSpaceSize = neededSpace;
} }
/* Interior of the frame-compression loop (function head not visible here).
 * A ZSTD_stats_t scratch object is allocated via the context's customAlloc
 * and must be released on EVERY exit path below — each early return frees it.
 * NOTE(review): zc->seqStore.stats is left pointing at freed memory after the
 * final customFree(stats); verify no later code reads it. */
BYTE* const ostart = (BYTE*)dst;
BYTE* op = ostart;
const U32 maxDist = 1 << zc->params.cParams.windowLog;   /* maximum back-reference distance for this window */
- ZSTD_stats_t* stats = malloc(sizeof(ZSTD_stats_t));
+ ZSTD_stats_t* stats = (ZSTD_stats_t*) zc->customAlloc(sizeof(ZSTD_stats_t));
if (!stats) return ERROR(memory_allocation);
zc->seqStore.stats = stats;
ZSTD_statsInit(stats);
size_t cSize;
ZSTD_statsResetFreqs(stats);
- if (dstCapacity < ZSTD_blockHeaderSize + MIN_CBLOCK_SIZE) return ERROR(dstSize_tooSmall); /* not enough space to store compressed block */
+ if (dstCapacity < ZSTD_blockHeaderSize + MIN_CBLOCK_SIZE) { zc->customFree(stats); return ERROR(dstSize_tooSmall); } /* not enough space to store compressed block */
if (remaining < blockSize) blockSize = remaining;
if ((U32)(ip+blockSize - zc->base) > zc->loadedDictEnd + maxDist) {
}
cSize = ZSTD_compressBlock_internal(zc, op+ZSTD_blockHeaderSize, dstCapacity-ZSTD_blockHeaderSize, ip, blockSize);
- if (ZSTD_isError(cSize)) return cSize;
+ if (ZSTD_isError(cSize)) { zc->customFree(stats); return cSize; }   /* release stats before propagating the error */
if (cSize == 0) { /* block is not compressible */
cSize = ZSTD_noCompressBlock(op, dstCapacity, ip, blockSize);
- if (ZSTD_isError(cSize)) return cSize;
+ if (ZSTD_isError(cSize)) { zc->customFree(stats); return cSize; }
} else {
/* write the 24-bit compressed-block size into the block header */
op[0] = (BYTE)(cSize>>16);
op[1] = (BYTE)(cSize>>8);
}
ZSTD_statsPrint(stats, zc->params.cParams.searchLength);
- free(stats);
+ zc->customFree(stats);   /* success path: stats was obtained from customAlloc above */
return op-ostart;   /* number of bytes written to dst */
}
/* Tail of the one-shot compression entry point (function head not visible).
 * Uses a stack-allocated context, so it must NOT go through
 * ZSTD_createCCtx()/ZSTD_freeCCtx(); allocator defaults are set by hand. */
size_t result;
ZSTD_CCtx ctxBody;
memset(&ctxBody, 0, sizeof(ctxBody));   /* zero-init to mirror calloc() in ZSTD_createCCtx() */
+ ctxBody.customAlloc = malloc;   /* stack context always uses the default allocators */
+ ctxBody.customFree = free;
result = ZSTD_compressCCtx(&ctxBody, dst, dstCapacity, src, srcSize, compressionLevel);
- free(ctxBody.workSpace); /* can't free ctxBody, since it's on stack; just free heap content */
+ ctxBody.customFree(ctxBody.workSpace); /* can't free ctxBody, since it's on stack; just free heap content */
return result;
}