static size_t ZSTD_estimateCCtxSize_usingCCtxParams_internal(
const ZSTD_compressionParameters* cParams,
const ldmParams_t* ldmParams,
+ const int isStatic,
const size_t buffInSize,
const size_t buffOutSize,
const size_t pledgedSrcSize)
size_t const bufferSpace = ZSTD_cwksp_alloc_size(buffInSize)
+ ZSTD_cwksp_alloc_size(buffOutSize);
- size_t const cctxSpace = ZSTD_cwksp_alloc_size(sizeof(ZSTD_CCtx));
+ size_t const cctxSpace = isStatic ? ZSTD_cwksp_alloc_size(sizeof(ZSTD_CCtx)) : 0;
size_t const neededSpace =
cctxSpace +
/* estimateCCtxSize is for one-shot compression. So no buffers should
 * be needed. However, we still allocate two 0-sized buffers, which can
 * take space under ASAN. */
return ZSTD_estimateCCtxSize_usingCCtxParams_internal(
- &cParams, &params->ldmParams, 0, 0, ZSTD_CONTENTSIZE_UNKNOWN);
+ &cParams, &params->ldmParams, 1, 0, 0, ZSTD_CONTENTSIZE_UNKNOWN);
}
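For orientation, here is a hedged sketch of the consumer these estimates are sized for: ZSTD_estimateCCtxSize() reports the workspace size to hand to ZSTD_initStaticCCtx(), and a static context must carve the ZSTD_CCtx struct itself out of that workspace, which is why the entry point above now passes 1 for isStatic. The wrapper function, its name, and its error handling below are illustrative, not part of the library or of this patch.

    /* Sketch: size a workspace from the estimate and run a one-shot compression
     * through a static CCtx. Returns the compressed size, or 0 on any failure. */
    #define ZSTD_STATIC_LINKING_ONLY   /* estimate/static APIs live in the experimental section of zstd.h */
    #include <zstd.h>
    #include <stdlib.h>

    static size_t compressOneShotStatic(void* dst, size_t dstCapacity,
                                        const void* src, size_t srcSize, int level)
    {
        size_t const wkspSize = ZSTD_estimateCCtxSize(level);   /* includes sizeof(ZSTD_CCtx) */
        void* const wksp = malloc(wkspSize);
        size_t cSize = 0;                                        /* 0 == failure in this sketch */
        if (wksp != NULL) {
            ZSTD_CCtx* const cctx = ZSTD_initStaticCCtx(wksp, wkspSize);   /* NULL if wkspSize is too small */
            if (cctx != NULL) {
                size_t const ret = ZSTD_compressCCtx(cctx, dst, dstCapacity, src, srcSize, level);
                if (!ZSTD_isError(ret)) cSize = ret;
            }
            free(wksp);
        }
        return cSize;
    }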
size_t ZSTD_estimateCCtxSize_usingCParams(ZSTD_compressionParameters cParams)
size_t const outBuffSize = ZSTD_compressBound(blockSize) + 1;
return ZSTD_estimateCCtxSize_usingCCtxParams_internal(
- &cParams, &params->ldmParams, inBuffSize, outBuffSize,
+ &cParams, &params->ldmParams, 1, inBuffSize, outBuffSize,
ZSTD_CONTENTSIZE_UNKNOWN);
}
}
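The streaming estimate above additionally folds in inBuffSize and outBuffSize (with outBuffSize = ZSTD_compressBound(blockSize) + 1), because a CStream keeps its input/output staging buffers inside the same workspace. Below is a hedged usage sketch under that assumption, going through the public static/streaming entry points ZSTD_initStaticCCtx() and ZSTD_compressStream2(); the function name and error handling are illustrative, not from this patch.

    /* Sketch: compress a whole buffer through a static streaming context sized
     * by ZSTD_estimateCStreamSize(). Returns the compressed size, or 0 on failure.
     * dstCapacity should be at least ZSTD_compressBound(srcSize) so a single
     * ZSTD_e_end call can flush the entire frame. */
    #define ZSTD_STATIC_LINKING_ONLY
    #include <zstd.h>
    #include <stdlib.h>

    static size_t streamCompressStatic(void* dst, size_t dstCapacity,
                                       const void* src, size_t srcSize, int level)
    {
        size_t const wkspSize = ZSTD_estimateCStreamSize(level);
        void* const wksp = malloc(wkspSize);
        size_t result = 0;
        if (wksp != NULL) {
            ZSTD_CCtx* const cctx = ZSTD_initStaticCCtx(wksp, wkspSize);
            if (cctx != NULL) {
                ZSTD_inBuffer in = { src, srcSize, 0 };
                ZSTD_outBuffer out = { dst, dstCapacity, 0 };
                ZSTD_CCtx_setParameter(cctx, ZSTD_c_compressionLevel, level);
                if (ZSTD_compressStream2(cctx, &out, &in, ZSTD_e_end) == 0)
                    result = out.pos;   /* frame fully written and flushed */
            }
            free(wksp);
        }
        return result;
    }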
{
size_t const neededSpace =
ZSTD_estimateCCtxSize_usingCCtxParams_internal(
- &params.cParams, &params.ldmParams,
+ &params.cParams, &params.ldmParams, zc->staticSize != 0,
buffInSize, buffOutSize, pledgedSrcSize);
int const workspaceTooSmall = ZSTD_cwksp_sizeof(ws) < neededSpace;