return FSE_read32(memPtr);
else
{
- const BYTE* p = memPtr;
+ const BYTE* p = (const BYTE*)memPtr;
return (U32)((U32)p[0] + ((U32)p[1]<<8) + ((U32)p[2]<<16) + ((U32)p[3]<<24));
}
}
}
else
{
- BYTE* p = memPtr;
+ BYTE* p = (BYTE*)memPtr;
p[0] = (BYTE)val32;
p[1] = (BYTE)(val32>>8);
p[2] = (BYTE)(val32>>16);
return FSE_read64(memPtr);
else
{
- const BYTE* p = memPtr;
+ const BYTE* p = (const BYTE*)memPtr;
return (U64)((U64)p[0] + ((U64)p[1]<<8) + ((U64)p[2]<<16) + ((U64)p[3]<<24)
+ ((U64)p[4]<<32) + ((U64)p[5]<<40) + ((U64)p[6]<<48) + ((U64)p[7]<<56));
}
}
else
{
- BYTE* p = memPtr;
+ BYTE* p = (BYTE*)memPtr;
p[0] = (BYTE)val64;
p[1] = (BYTE)(val64>>8);
p[2] = (BYTE)(val64>>16);
int FSE_compareRankT(const void* r1, const void* r2)
{
- const rank_t* R1 = r1;
- const rank_t* R2 = r2;
+ const rank_t* R1 = (const rank_t*)r1;
+ const rank_t* R2 = (const rank_t*)r2;
return 2 * (R1->count < R2->count) - 1;
}
size_t FSE_buildDTable_rle (void* DTable, BYTE symbolValue)
{
- U32* const base32 = DTable;
+ U32* const base32 = (U32* const)DTable;
FSE_decode_t* const cell = (FSE_decode_t*)(base32 + 1);
/* Sanity check */
size_t FSE_buildDTable_raw (void* DTable, unsigned nbBits)
{
- U32* const base32 = DTable;
+ U32* const base32 = (U32* const)DTable;
FSE_decode_t* dinfo = (FSE_decode_t*)(base32 + 1);
const unsigned tableSize = 1 << nbBits;
const unsigned tableMask = tableSize - 1;
void FSE_initDState(FSE_DState_t* DStatePtr, FSE_DStream_t* bitD, const void* DTable)
{
- const U32* const base32 = DTable;
+ const U32* const base32 = (const U32* const)DTable;
DStatePtr->state = FSE_readBits(bitD, base32[0]);
FSE_reloadDStream(bitD);
DStatePtr->table = base32 + 1;
size_t FSE_FUNCTION_NAME(FSE_buildDTable, FSE_FUNCTION_EXTENSION)
(void* DTable, const short* const normalizedCounter, unsigned maxSymbolValue, unsigned tableLog)
{
- U32* const base32 = DTable;
+ U32* const base32 = (U32* const)DTable;
FSE_DECODE_TYPE* const tableDecode = (FSE_DECODE_TYPE*) (base32+1);
const U32 tableSize = 1 << tableLog;
const U32 tableMask = tableSize-1;
static void ZSTD_wildcopy(void* dst, const void* src, size_t length)
{
- const BYTE* ip = src;
- BYTE* op = dst;
+ const BYTE* ip = (const BYTE*)src;
+ BYTE* op = (BYTE*)dst;
BYTE* const oend = op + length;
while (op < oend) COPY8(op, ip);
}
static size_t ZSTD_writeProgressive(void* ptr, size_t value)
{
- BYTE* const bStart = ptr;
+ BYTE* const bStart = (BYTE* const)ptr;
BYTE* byte = bStart;
do
static size_t ZSTD_readProgressive(size_t* result, const void* ptr)
{
- const BYTE* const bStart = ptr;
+ const BYTE* const bStart = (const BYTE* const)ptr;
const BYTE* byte = bStart;
size_t r = 0;
U32 shift = 0;
void ZSTD_resetCCtx(ZSTD_cctx_t ctx)
{
- refTables_t* srt = ctx;
+ refTables_t* srt = (refTables_t*)ctx;
srt->base = NULL;
memset(srt->hashTable, 0, HASH_TABLESIZE*4);
}
static size_t ZSTD_compressRle (void* dst, size_t maxDstSize, const void* src, size_t srcSize)
{
- BYTE* const ostart = dst;
+ BYTE* const ostart = (BYTE* const)dst;
/* at this stage : dstSize >= FSE_compressBound(srcSize) > (ZSTD_blockHeaderSize+1) (checked by ZSTD_compressLiterals()) */
(void)maxDstSize;
static size_t ZSTD_noCompressBlock (void* dst, size_t maxDstSize, const void* src, size_t srcSize)
{
- BYTE* const ostart = dst;
+ BYTE* const ostart = (BYTE* const)dst;
if (srcSize + ZSTD_blockHeaderSize > maxDstSize) return (size_t)-ZSTD_ERROR_maxDstSize_tooSmall;
memcpy(ostart + ZSTD_blockHeaderSize, src, srcSize);
const void* src, size_t srcSize,
const void* CTable)
{
- const BYTE* const istart = (const BYTE*) src;
+ const BYTE* const istart = (const BYTE*)src;
const BYTE* ip = istart;
const BYTE* const iend = istart + srcSize;
FSE_CStream_t bitC;
op_l += lastLLSize;
/* Final compression stage */
- return ZSTD_compressEntropy(dst, maxDstSize,
+ return ZSTD_compressEntropy((BYTE*)dst, maxDstSize,
op_l_start, op_l, op_rl_start, op_rl, op_ml_start, op_offset_start, op_dumps_start, op_dumps,
srcSize, lastLLSize);
}
size_t ZSTD_compressContinue(ZSTD_cctx_t cctx, void* dst, size_t maxDstSize, const void* src, size_t srcSize)
{
refTables_t* ctx = (refTables_t*) cctx;
- const BYTE* const istart = src;
+ const BYTE* const istart = (const BYTE* const)src;
const BYTE* ip = istart;
- BYTE* const ostart = dst;
+ BYTE* const ostart = (BYTE* const)dst;
BYTE* op = ostart;
//U32 limit = 4 * BLOCKSIZE;
//const U32 updateRate = 2 * BLOCKSIZE;
/* Init */
- if (ctx->base==NULL) ctx->base = src, ctx->current=0;
+ if (ctx->base==NULL) ctx->base = (const BYTE*)src, ctx->current=0;
if (src != ctx->base + ctx->current) /* not contiguous */
{
ZSTD_resetCCtx(ctx);
- ctx->base = src;
+ ctx->base = (const BYTE*)src;
ctx->current = 0;
}
ctx->current += (U32)srcSize;
size_t ZSTD_compressEnd(ZSTD_cctx_t ctx, void* dst, size_t maxDstSize)
{
- BYTE* op = dst;
+ BYTE* op = (BYTE*)dst;
// Sanity check
(void)ctx;
static size_t ZSTD_compressCCtx(void* ctx, void* dst, size_t maxDstSize, const void* src, size_t srcSize)
{
- BYTE* const ostart = dst;
+ BYTE* const ostart = (BYTE* const)dst;
BYTE* op = ostart;
/* Header */
size_t ZSTD_getcBlockSize(const void* src, size_t srcSize, blockProperties_t* bpPtr)
{
- const BYTE* const in = src;
+ const BYTE* const in = (const BYTE* const)src;
BYTE headerFlags;
U32 cSize;
headerFlags = *in;
cSize = in[2] + (in[1]<<8) + ((in[0] & 7)<<16);
- bpPtr->blockType = headerFlags >> 6;
+ bpPtr->blockType = (blockType_t)(headerFlags >> 6);
bpPtr->origSize = (bpPtr->blockType == bt_rle) ? cSize : 0;
if (bpPtr->blockType == bt_end) return 0;
const void* src, size_t srcSize)
{
/* assumed : blockType == blockCompressed */
- const BYTE* ip = src;
+ const BYTE* ip = (const BYTE*)src;
short norm[256];
void* DTable = ctx;
U32 maxSymbolValue = 255;
const BYTE** litPtr,
const void* src, size_t srcSize)
{
- const BYTE* const istart = src;
+ const BYTE* const istart = (const BYTE* const)src;
const BYTE* ip = istart;
- BYTE* const ostart = dst;
+ BYTE* const ostart = (BYTE* const)dst;
BYTE* const oend = ostart + maxDstSize;
blockProperties_t litbp;
void* DTableLL, void* DTableML, void* DTableOffb,
const void* src, size_t srcSize)
{
- const BYTE* const istart = src;
+ const BYTE* const istart = (const BYTE* const)src;
const BYTE* ip = istart;
const BYTE* const iend = istart + srcSize;
U32 LLtype, Offtype, MLtype;
FORCE_INLINE size_t ZSTD_decompressBlock(void* ctx, void* dst, size_t maxDstSize,
const void* src, size_t srcSize)
{
- const BYTE* ip = src;
+ const BYTE* ip = (const BYTE*)src;
const BYTE* const iend = ip + srcSize;
- BYTE* const ostart = dst;
+ BYTE* const ostart = (BYTE* const)dst;
BYTE* op = ostart;
BYTE* const oend = ostart + maxDstSize;
size_t errorCode;
static size_t ZSTD_decompressDCtx(void* ctx, void* dst, size_t maxDstSize, const void* src, size_t srcSize)
{
- const BYTE* ip = src;
+ const BYTE* ip = (const BYTE*)src;
const BYTE* iend = ip + srcSize;
- BYTE* const ostart = dst;
+ BYTE* const ostart = (BYTE* const)dst;
BYTE* op = ostart;
BYTE* const oend = ostart + maxDstSize;
size_t remainingSize = srcSize;
ZSTD_dctx_t ZSTD_createDCtx(void)
{
- dctx_t* dctx = malloc(sizeof(dctx_t));
+ dctx_t* dctx = (dctx_t*)malloc(sizeof(dctx_t));
dctx->expected = 4 + ZSTD_blockHeaderSize; // Frame Header + Block Header
dctx->started = 0;
return (ZSTD_dctx_t)dctx;
// Prepare next block
{
- const BYTE* header = src;
+ const BYTE* header = (const BYTE*)src;
blockProperties_t bp;
size_t blockSize;
header += cSize;