U32 HUF_getNbBitsFromCTable(HUF_CElt const* CTable, U32 symbolValue)
{
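    /* note : CTable[0] is a header cell holding the tableLog
     * ("CTable[0] = maxNbBits" further below) ; symbol entries start at CTable + 1 */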
- const HUF_CElt* ct = CTable + 1;
+ const HUF_CElt* const ct = CTable + 1;
assert(symbolValue <= HUF_SYMBOLVALUE_MAX);
return (U32)HUF_getNbBits(ct[symbolValue]);
}
/**
* HUF_setMaxHeight():
- * Enforces maxNbBits on the Huffman tree described in huffNode.
+ * Try to enforce @targetNbBits on the Huffman tree described in @huffNode.
*
- * It sets all nodes with nbBits > maxNbBits to be maxNbBits. Then it adjusts
- * the tree to so that it is a valid canonical Huffman tree.
+ * It attempts to convert all nodes with nbBits > @targetNbBits
+ * to employ @targetNbBits instead. Then it adjusts the tree
+ * so that it remains a valid canonical Huffman tree.
*
* @pre The sum of the ranks of each symbol == 2^largestBits,
* where largestBits == huffNode[lastNonNull].nbBits.
* @post The sum of the ranks of each symbol == 2^largestBits,
- * where largestBits is the return value <= maxNbBits.
+ * where largestBits is the return value (expected <= targetNbBits).
*
- * @param huffNode The Huffman tree modified in place to enforce maxNbBits.
+ * @param huffNode The Huffman tree modified in place to enforce targetNbBits.
+ * It's presumed sorted, from most frequent to rarest symbol.
* @param lastNonNull The symbol with the lowest count in the Huffman tree.
- * @param maxNbBits The maximum allowed number of bits, which the Huffman tree
+ * @param targetNbBits The allowed number of bits, which the Huffman tree
* may not respect. After this function the Huffman tree will
- * respect maxNbBits.
- * @return The maximum number of bits of the Huffman tree after adjustment,
- * necessarily no more than maxNbBits.
+ * respect targetNbBits.
+ * @return The maximum number of bits of the Huffman tree after adjustment.
*/
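/* Worked example (illustrative numbers, not from a real run) :
 * take depths {2,2,3,3,4,4,4,4}, i.e. largestBits = 4, with targetNbBits = 3.
 * The @pre condition holds : 4+4+2+2+1+1+1+1 == 16 == 2^largestBits.
 * baseCost = 1<<(4-3) = 2 ; flattening each 4-bit symbol to 3 bits adds
 * baseCost - (1<<(4-4)) = 1, so totalCost = 4, renormalized to 4>>1 = 2.
 * Repaying it demotes both 2-bit symbols to 3 bits (nBitsToDecrease = 1,
 * each repaying 1<<(1-1) = 1), giving {3,3,3,3,3,3,3,3} :
 * the rank sum is 8 == 2^3, a valid canonical tree at targetNbBits. */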
-static U32 HUF_setMaxHeight(nodeElt* huffNode, U32 lastNonNull, U32 maxNbBits)
+static U32 HUF_setMaxHeight(nodeElt* huffNode, U32 lastNonNull, U32 targetNbBits)
{
const U32 largestBits = huffNode[lastNonNull].nbBits;
- /* early exit : no elt > maxNbBits, so the tree is already valid. */
- if (largestBits <= maxNbBits) return largestBits;
+ /* early exit : no elt > targetNbBits, so the tree is already valid. */
+ if (largestBits <= targetNbBits) return largestBits;
+
+ DEBUGLOG(5, "HUF_setMaxHeight (targetNbBits = %u)", targetNbBits);
/* there are several too large elements (at least >= 2) */
{ int totalCost = 0;
- const U32 baseCost = 1 << (largestBits - maxNbBits);
+ const U32 baseCost = 1 << (largestBits - targetNbBits);
int n = (int)lastNonNull;
- /* Adjust any ranks > maxNbBits to maxNbBits.
+ /* Adjust any ranks > targetNbBits to targetNbBits.
         * Compute totalCost, which is how far the sum of the ranks
         * is over 2^largestBits after adjusting the offending ranks.
*/
- while (huffNode[n].nbBits > maxNbBits) {
+ while (huffNode[n].nbBits > targetNbBits) {
totalCost += baseCost - (1 << (largestBits - huffNode[n].nbBits));
- huffNode[n].nbBits = (BYTE)maxNbBits;
+ huffNode[n].nbBits = (BYTE)targetNbBits;
n--;
}
- /* n stops at huffNode[n].nbBits <= maxNbBits */
- assert(huffNode[n].nbBits <= maxNbBits);
- /* n end at index of smallest symbol using < maxNbBits */
- while (huffNode[n].nbBits == maxNbBits) --n;
+ /* n stops at huffNode[n].nbBits <= targetNbBits */
+ assert(huffNode[n].nbBits <= targetNbBits);
+        /* n ends at index of smallest symbol using < targetNbBits */
+ while (huffNode[n].nbBits == targetNbBits) --n;
- /* renorm totalCost from 2^largestBits to 2^maxNbBits
+ /* renorm totalCost from 2^largestBits to 2^targetNbBits
* note : totalCost is necessarily a multiple of baseCost */
assert((totalCost & (baseCost - 1)) == 0);
- totalCost >>= (largestBits - maxNbBits);
+ totalCost >>= (largestBits - targetNbBits);
assert(totalCost > 0);
/* repay normalized cost */
/* Get pos of last (smallest = lowest cum. count) symbol per rank */
ZSTD_memset(rankLast, 0xF0, sizeof(rankLast));
- { U32 currentNbBits = maxNbBits;
+ { U32 currentNbBits = targetNbBits;
int pos;
for (pos=n ; pos >= 0; pos--) {
if (huffNode[pos].nbBits >= currentNbBits) continue;
- currentNbBits = huffNode[pos].nbBits; /* < maxNbBits */
- rankLast[maxNbBits-currentNbBits] = (U32)pos;
+ currentNbBits = huffNode[pos].nbBits; /* < targetNbBits */
+ rankLast[targetNbBits-currentNbBits] = (U32)pos;
} }
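        /* e.g. with targetNbBits == 11 : rankLast[2] now holds the position
         * of the smallest-count symbol coded on 11-2 = 9 bits (if any). */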
while (totalCost > 0) {
                    if (rankLast[nBitsToDecrease] == 0)    /* special case : reached largest symbol */
                        rankLast[nBitsToDecrease] = noSymbol;
else {
rankLast[nBitsToDecrease]--;
- if (huffNode[rankLast[nBitsToDecrease]].nbBits != maxNbBits-nBitsToDecrease)
+ if (huffNode[rankLast[nBitsToDecrease]].nbBits != targetNbBits-nBitsToDecrease)
rankLast[nBitsToDecrease] = noSymbol; /* this rank is now empty */
}
} /* while (totalCost > 0) */
* TODO.
*/
        while (totalCost < 0) {  /* Sometimes, cost correction overshoots */
- /* special case : no rank 1 symbol (using maxNbBits-1);
- * let's create one from largest rank 0 (using maxNbBits).
+ /* special case : no rank 1 symbol (using targetNbBits-1);
+ * let's create one from largest rank 0 (using targetNbBits).
*/
if (rankLast[1] == noSymbol) {
- while (huffNode[n].nbBits == maxNbBits) n--;
+ while (huffNode[n].nbBits == targetNbBits) n--;
huffNode[n+1].nbBits--;
assert(n >= 0);
rankLast[1] = (U32)(n+1);
} /* repay normalized cost */
} /* there are several too large elements (at least >= 2) */
- return maxNbBits;
+ return targetNbBits;
}
typedef struct {
assert(HUF_isSorted(huffNode, maxSymbolValue1));
}
+
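+/* Debug helpers : dump the sorted symbol values and their assigned bit counts
+ * through RAWLOG at level 6. Returning the element count lets them be invoked
+ * from inside a DEBUGLOG() argument list ; the `(void)` casts at the call sites
+ * keep the helpers referenced when DEBUGLOG(6, ...) compiles to nothing. */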
+static size_t showHNodeSymbols(const nodeElt* hnode, size_t size)
+{
+ size_t u;
+ for (u=0; u<size; u++) {
+ RAWLOG(6, " %u", hnode[u].byte);
+ }
+ RAWLOG(6, " \n");
+ return size;
+}
+
+static size_t showHNodeBits(const nodeElt* hnode, size_t size)
+{
+ size_t u;
+ for (u=0; u<size; u++) {
+ RAWLOG(6, " %u", hnode[u].nbBits);
+ }
+ RAWLOG(6, " \n");
+ return size;
+}
+
+
/** HUF_buildCTable_wksp() :
* Same as HUF_buildCTable(), but using externally allocated scratch buffer.
 * `workSpace` must be aligned on 4-byte boundaries, and be at least as large as sizeof(HUF_buildCTable_wksp_tables).
int lowS, lowN;
int nodeNb = STARTNODE;
int n, nodeRoot;
+ DEBUGLOG(5, "HUF_buildTree (alphabet size = %u)", maxSymbolValue + 1);
/* init for parents */
nonNullRank = (int)maxSymbolValue;
while(huffNode[nonNullRank].count == 0) nonNullRank--;
for (n=0; n<=nonNullRank; n++)
huffNode[n].nbBits = huffNode[ huffNode[n].parent ].nbBits + 1;
+ DEBUGLOG(6, "Initial distribution of bits completed (%zu sorted symbols)", showHNodeBits(huffNode, maxSymbolValue+1)); (void)showHNodeBits;
+
return nonNullRank;
}
CTable[0] = maxNbBits;
}
-size_t HUF_buildCTable_wksp (HUF_CElt* CTable, const unsigned* count, U32 maxSymbolValue, U32 maxNbBits, void* workSpace, size_t wkspSize)
+size_t
+HUF_buildCTable_wksp(HUF_CElt* CTable, const unsigned* count, U32 maxSymbolValue, U32 maxNbBits,
+ void* workSpace, size_t wkspSize)
{
- HUF_buildCTable_wksp_tables* const wksp_tables = (HUF_buildCTable_wksp_tables*)HUF_alignUpWorkspace(workSpace, &wkspSize, ZSTD_ALIGNOF(U32));
+ HUF_buildCTable_wksp_tables* const wksp_tables =
+ (HUF_buildCTable_wksp_tables*)HUF_alignUpWorkspace(workSpace, &wkspSize, ZSTD_ALIGNOF(U32));
nodeElt* const huffNode0 = wksp_tables->huffNodeTbl;
nodeElt* const huffNode = huffNode0+1;
int nonNullRank;
+ DEBUGLOG(5, "HUF_buildCTable_wksp (alphabet size = %u)", maxSymbolValue+1);
+
/* safety checks */
if (wkspSize < sizeof(HUF_buildCTable_wksp_tables))
- return ERROR(workSpace_tooSmall);
+ return ERROR(workSpace_tooSmall);
if (maxNbBits == 0) maxNbBits = HUF_TABLELOG_DEFAULT;
if (maxSymbolValue > HUF_SYMBOLVALUE_MAX)
- return ERROR(maxSymbolValue_tooLarge);
+ return ERROR(maxSymbolValue_tooLarge);
ZSTD_memset(huffNode0, 0, sizeof(huffNodeTable));
/* sort, decreasing order */
HUF_sort(huffNode, count, maxSymbolValue, wksp_tables->rankPosition);
+ DEBUGLOG(6, "sorted symbols completed (%zu symbols)", showHNodeSymbols(huffNode, maxSymbolValue+1)); (void)showHNodeSymbols;
/* build tree */
nonNullRank = HUF_buildTree(huffNode, maxSymbolValue);
- /* enforce maxTableLog */
+ /* determine and enforce maxTableLog */
maxNbBits = HUF_setMaxHeight(huffNode, (U32)nonNullRank, maxNbBits);
if (maxNbBits > HUF_TABLELOG_MAX) return ERROR(GENERIC); /* check fit into table */
return (size_t)(op-ostart);
}
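+/* Usage sketch for HUF_buildCTable_wksp() (hypothetical caller ; `count` and
+ * `maxSymbolValue` are assumed to come from a prior HIST_count pass) :
+ *
+ *   HUF_CElt cTable[HUF_CTABLE_SIZE_ST(HUF_SYMBOLVALUE_MAX)];
+ *   HUF_buildCTable_wksp_tables wksp;
+ *   size_t const maxBits = HUF_buildCTable_wksp(cTable, count, maxSymbolValue,
+ *                            HUF_TABLELOG_DEFAULT, &wksp, sizeof(wksp));
+ *   if (HUF_isError(maxBits)) { ... }   // e.g. workSpace_tooSmall
+ */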
+static size_t showU32(const U32* arr, size_t size)
+{
+ size_t u;
+ for (u=0; u<size; u++) {
+ RAWLOG(6, " %u", arr[u]);
+ }
+ RAWLOG(6, " \n");
+ return size;
+}
+
+static size_t showCTableBits(const HUF_CElt* ctable, size_t size)
+{
+ size_t u;
+ for (u=0; u<size; u++) {
+ RAWLOG(6, " %zu", HUF_getNbBits(ctable[u]));
+ }
+ RAWLOG(6, " \n");
+ return size;
+}
+
+
typedef struct {
unsigned count[HUF_SYMBOLVALUE_MAX + 1];
HUF_CElt CTable[HUF_CTABLE_SIZE_ST(HUF_SYMBOLVALUE_MAX)];
BYTE* const oend = ostart + dstSize;
BYTE* op = ostart;
+ DEBUGLOG(5, "HUF_compress_internal (srcSize=%zu)", srcSize);
HUF_STATIC_ASSERT(sizeof(*table) + HUF_WORKSPACE_MAX_ALIGNMENT <= HUF_WORKSPACE_SIZE);
/* checks & inits */
DEBUG_STATIC_ASSERT(SUSPECT_INCOMPRESSIBLE_SAMPLE_RATIO >= 2);
if (suspectUncompressible && srcSize >= (SUSPECT_INCOMPRESSIBLE_SAMPLE_SIZE * SUSPECT_INCOMPRESSIBLE_SAMPLE_RATIO)) {
size_t largestTotal = 0;
+ DEBUGLOG(5, "input suspected incompressible : sampling to check");
{ unsigned maxSymbolValueBegin = maxSymbolValue;
CHECK_V_F(largestBegin, HIST_count_simple (table->count, &maxSymbolValueBegin, (const BYTE*)src, SUSPECT_INCOMPRESSIBLE_SAMPLE_SIZE) );
largestTotal += largestBegin;
if (largest == srcSize) { *ostart = ((const BYTE*)src)[0]; return 1; } /* single symbol, rle */
if (largest <= (srcSize >> 7)+4) return 0; /* heuristic : probably not compressible enough */
}
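    /* e.g. for a 128 KB input : if the most frequent symbol appears
     * (131072>>7)+4 == 1028 times or fewer (~0.8% of the input), the block
     * is deemed not compressible enough and is left uncompressed. */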
+ DEBUGLOG(6, "histogram detail completed (%zu symbols)", showU32(table->count, maxSymbolValue+1)); (void)showU32;
/* Check validity of previous table */
if ( repeat
&table->wksps.buildCTable_wksp, sizeof(table->wksps.buildCTable_wksp));
CHECK_F(maxBits);
huffLog = (U32)maxBits;
+ DEBUGLOG(6, "bit distribution completed (%zu symbols)", showCTableBits(table->CTable + 1, maxSymbolValue+1)); (void)showCTableBits;
}
/* Zero unused symbols in CTable, so we can check it for validity */
{
HUF_CElt* hufTable, HUF_repeat* repeat, int preferRepeat,
int bmi2, unsigned suspectUncompressible)
{
+ DEBUGLOG(5, "HUF_compress1X_repeat (srcSize = %zu)", srcSize);
return HUF_compress_internal(dst, dstSize, src, srcSize,
maxSymbolValue, huffLog, HUF_singleStream,
workSpace, wkspSize, hufTable,
unsigned maxSymbolValue, unsigned huffLog,
void* workSpace, size_t wkspSize)
{
+ DEBUGLOG(5, "HUF_compress4X_wksp (srcSize = %zu)", srcSize);
return HUF_compress_internal(dst, dstSize, src, srcSize,
maxSymbolValue, huffLog, HUF_fourStreams,
workSpace, wkspSize,
void* workSpace, size_t wkspSize,
HUF_CElt* hufTable, HUF_repeat* repeat, int preferRepeat, int bmi2, unsigned suspectUncompressible)
{
+ DEBUGLOG(5, "HUF_compress4X_repeat (srcSize = %zu)", srcSize);
return HUF_compress_internal(dst, dstSize, src, srcSize,
maxSymbolValue, huffLog, HUF_fourStreams,
workSpace, wkspSize,
}
ZSTD_memcpy(ostart + flSize, src, srcSize);
- DEBUGLOG(5, "Raw literals: %u -> %u", (U32)srcSize, (U32)(srcSize + flSize));
+ DEBUGLOG(5, "Raw (uncompressed) literals: %u -> %u", (U32)srcSize, (U32)(srcSize + flSize));
return srcSize + flSize;
}
return flSize+1;
}
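+/* Debug helper : dump the literal payload in hexadecimal through RAWLOG
+ * at level 6, following the same pattern as the helpers above. */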
+static size_t showHexa(const void* src, size_t srcSize)
+{
+ const BYTE* const ip = (const BYTE*)src;
+ size_t u;
+ for (u=0; u<srcSize; u++) {
+ RAWLOG(6, " %02X", ip[u]);
+ }
+ RAWLOG(6, " \n");
+ return srcSize;
+}
+
size_t ZSTD_compressLiterals (ZSTD_hufCTables_t const* prevHuf,
ZSTD_hufCTables_t* nextHuf,
ZSTD_strategy strategy, int disableLiteralCompression,
DEBUGLOG(5,"ZSTD_compressLiterals (disableLiteralCompression=%i srcSize=%u)",
disableLiteralCompression, (U32)srcSize);
+ DEBUGLOG(6, "Completed literals listing (%zu bytes)", showHexa(src, srcSize)); (void)showHexa;
+
/* Prepare nextEntropy assuming reusing the existing table */
ZSTD_memcpy(nextHuf, prevHuf, sizeof(*prevHuf));
RETURN_ERROR_IF(dstCapacity < lhSize+1, dstSize_tooSmall, "not enough space for compression");
{ HUF_repeat repeat = prevHuf->repeatMode;
- int const preferRepeat = strategy < ZSTD_lazy ? srcSize <= 1024 : 0;
+ int const preferRepeat = (strategy < ZSTD_lazy) ? srcSize <= 1024 : 0;
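+        /* HUF_compress1X_repeat() and HUF_compress4X_repeat() share one
+         * signature, so the single/four-stream decision can select a function
+         * pointer once, instead of duplicating the long argument list
+         * across a ternary. */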
+ typedef size_t (*huf_compress_f)(void*, size_t, const void*, size_t, unsigned, unsigned, void*, size_t, HUF_CElt*, HUF_repeat*, int, int, unsigned);
+ huf_compress_f huf_compress;
if (repeat == HUF_repeat_valid && lhSize == 3) singleStream = 1;
- cLitSize = singleStream ?
- HUF_compress1X_repeat(
- ostart+lhSize, dstCapacity-lhSize, src, srcSize,
- HUF_SYMBOLVALUE_MAX, HUF_TABLELOG_DEFAULT, entropyWorkspace, entropyWorkspaceSize,
- (HUF_CElt*)nextHuf->CTable, &repeat, preferRepeat, bmi2, suspectUncompressible) :
- HUF_compress4X_repeat(
- ostart+lhSize, dstCapacity-lhSize, src, srcSize,
- HUF_SYMBOLVALUE_MAX, HUF_TABLELOG_DEFAULT, entropyWorkspace, entropyWorkspaceSize,
- (HUF_CElt*)nextHuf->CTable, &repeat, preferRepeat, bmi2, suspectUncompressible);
+ huf_compress = singleStream ? HUF_compress1X_repeat : HUF_compress4X_repeat;
+ cLitSize = huf_compress(ostart+lhSize, dstCapacity-lhSize,
+ src, srcSize,
+ HUF_SYMBOLVALUE_MAX, HUF_TABLELOG_DEFAULT,
+ entropyWorkspace, entropyWorkspaceSize,
+ (HUF_CElt*)nextHuf->CTable,
+ &repeat, preferRepeat,
+ bmi2, suspectUncompressible);
if (repeat != HUF_repeat_none) {
/* reused the existing table */
DEBUGLOG(5, "Reusing previous huffman table");