git.ipfire.org Git - thirdparty/zstd.git/commitdiff
codemod: repcodes_t -> Repcodes_t
author    Yann Collet <cyan@fb.com>
          Wed, 11 Dec 2024 00:00:20 +0000 (16:00 -0800)
committer Yann Collet <cyan@fb.com>
          Fri, 20 Dec 2024 18:36:57 +0000 (10:36 -0800)
lib/compress/zstd_compress.c
lib/compress/zstd_compress_internal.h
lib/compress/zstd_compress_superblock.c
lib/compress/zstd_opt.c
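
This commit is a mechanical rename of the typedef: the struct tag repcodes_s is left
untouched, only the typedef name changes from repcodes_t to Repcodes_t, and the three
.c files update their uses accordingly. For reference, the renamed typedef (shown in
the zstd_compress_internal.h hunk below) amounts to the single line sketched here
(U32 spelled as unsigned to keep it self-contained):

    typedef struct repcodes_s { unsigned rep[3]; } Repcodes_t;   /* was: repcodes_t */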

diff --git a/lib/compress/zstd_compress.c b/lib/compress/zstd_compress.c
index 029928fb2c407ce089e4ba3299e097d1ae81f22e..03806c2340ccef7e413779c061670ed3830bc058 100644
@@ -3427,7 +3427,7 @@ static size_t ZSTD_copyBlockSequences(SeqCollector* seqCollector, const SeqStore
     ZSTD_Sequence* outSeqs = seqCollector->seqIndex == 0 ? seqCollector->seqStart : seqCollector->seqStart + seqCollector->seqIndex;
     const size_t nbOutSequences = nbInSequences + 1;
     size_t nbOutLiterals = 0;
-    repcodes_t repcodes;
+    Repcodes_t repcodes;
     size_t i;
 
     /* Bounds check that we have enough space for every input sequence
@@ -4059,7 +4059,7 @@ ZSTD_resolveRepcodeToRawOffset(const U32 rep[ZSTD_REP_NUM], const U32 offBase, c
  *        4+ : real_offset+3
  */
 static void
-ZSTD_seqStore_resolveOffCodes(repcodes_t* const dRepcodes, repcodes_t* const cRepcodes,
+ZSTD_seqStore_resolveOffCodes(Repcodes_t* const dRepcodes, Repcodes_t* const cRepcodes,
                         const SeqStore_t* const seqStore, U32 const nbSeq)
 {
     U32 idx = 0;
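
The numbering scheme referenced in the comment above (repcode slots first, then
"4+ : real_offset+3") can be illustrated with a minimal sketch. This is not the
library code, and it omits the litLength==0 shift that the real
ZSTD_resolveRepcodeToRawOffset() handles:

    #include <stdio.h>

    typedef unsigned U32;
    #define ZSTD_REP_NUM 3

    /* Illustrative sketch only: map an offBase value to a raw offset.
     * offBase == 0 is invalid and not handled here. */
    static U32 offBaseToRawOffset_sketch(U32 offBase, const U32 rep[ZSTD_REP_NUM])
    {
        if (offBase > ZSTD_REP_NUM)
            return offBase - ZSTD_REP_NUM;   /* 4+ : real_offset + 3 */
        return rep[offBase - 1];             /* 1-3 : repcode history lookup */
    }

    int main(void)
    {
        U32 const rep[ZSTD_REP_NUM] = { 1, 4, 8 };
        printf("%u %u\n", offBaseToRawOffset_sketch(2, rep),    /* -> 4 (rep[1])  */
                          offBaseToRawOffset_sketch(10, rep));  /* -> 7 (10 - 3)  */
        return 0;
    }
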
@@ -4096,7 +4096,7 @@ ZSTD_seqStore_resolveOffCodes(repcodes_t* const dRepcodes, repcodes_t* const cRe
 static size_t
 ZSTD_compressSeqStore_singleBlock(ZSTD_CCtx* zc,
                             const SeqStore_t* const seqStore,
-                                  repcodes_t* const dRep, repcodes_t* const cRep,
+                                  Repcodes_t* const dRep, Repcodes_t* const cRep,
                                   void* dst, size_t dstCapacity,
                             const void* src, size_t srcSize,
                                   U32 lastBlock, U32 isPartition)
@@ -4108,7 +4108,7 @@ ZSTD_compressSeqStore_singleBlock(ZSTD_CCtx* zc,
     size_t cSeqsSize;
 
     /* In case of an RLE or raw block, the simulated decompression repcode history must be reset */
-    repcodes_t const dRepOriginal = *dRep;
+    Repcodes_t const dRepOriginal = *dRep;
     DEBUGLOG(5, "ZSTD_compressSeqStore_singleBlock");
     if (isPartition)
         ZSTD_seqStore_resolveOffCodes(dRep, cRep, seqStore, (U32)(seqStore->sequences - seqStore->sequencesStart));
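
The reset mentioned in the comment above is a snapshot-and-rollback pattern; a sketch
with illustrative names only (not the real control flow):

    typedef unsigned U32;
    typedef struct { U32 rep[3]; } SketchRepcodes_t;    /* stands in for Repcodes_t */

    /* Snapshot the decoder-side history before compressing; if the block ends up
     * emitted raw or as RLE, the decoder never replays the sequences, so the
     * simulated history must be rolled back. */
    static void compressOneBlock_sketch(SketchRepcodes_t* dRep)
    {
        SketchRepcodes_t const dRepOriginal = *dRep;     /* snapshot */
        int emittedAsRawOrRLE = 0;
        /* ... compress sequences, updating *dRep as the decoder would;
         *     set emittedAsRawOrRLE if the compressed form is discarded ... */
        if (emittedAsRawOrRLE)
            *dRep = dRepOriginal;                        /* roll back the simulation */
    }
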
@@ -4279,10 +4279,10 @@ ZSTD_compressBlock_splitBlock_internal(ZSTD_CCtx* zc,
      *
      * See ZSTD_seqStore_resolveOffCodes() for more details.
      */
-    repcodes_t dRep;
-    repcodes_t cRep;
-    ZSTD_memcpy(dRep.rep, zc->blockState.prevCBlock->rep, sizeof(repcodes_t));
-    ZSTD_memcpy(cRep.rep, zc->blockState.prevCBlock->rep, sizeof(repcodes_t));
+    Repcodes_t dRep;
+    Repcodes_t cRep;
+    ZSTD_memcpy(dRep.rep, zc->blockState.prevCBlock->rep, sizeof(Repcodes_t));
+    ZSTD_memcpy(cRep.rep, zc->blockState.prevCBlock->rep, sizeof(Repcodes_t));
     ZSTD_memset(nextSeqStore, 0, sizeof(SeqStore_t));
 
     DEBUGLOG(5, "ZSTD_compressBlock_splitBlock_internal (dstCapacity=%u, dictLimit=%u, nextToUpdate=%u)",
@@ -4338,7 +4338,7 @@ ZSTD_compressBlock_splitBlock_internal(ZSTD_CCtx* zc,
     /* cRep and dRep may have diverged during the compression.
      * If so, we use the dRep repcodes for the next block.
      */
-    ZSTD_memcpy(zc->blockState.prevCBlock->rep, dRep.rep, sizeof(repcodes_t));
+    ZSTD_memcpy(zc->blockState.prevCBlock->rep, dRep.rep, sizeof(Repcodes_t));
     return cSize;
 }
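
Taken together, the hunks above implement a dual-history scheme for split blocks:
dRep tracks what the decoder will see (and is rolled back across raw/RLE partitions),
cRep tracks exactly what the seqStore encodes, and the decoder-side view is what the
next block inherits. A skeleton of that flow, with simplified names and no real
compression logic:

    #include <string.h>

    typedef unsigned U32;
    typedef struct { U32 rep[3]; } SketchRepcodes_t;    /* stands in for Repcodes_t */

    static void compressPartitions_sketch(SketchRepcodes_t* prevBlockRep, int nbPartitions)
    {
        SketchRepcodes_t dRep, cRep;
        memcpy(&dRep, prevBlockRep, sizeof(dRep));       /* decoder-side view */
        memcpy(&cRep, prevBlockRep, sizeof(cRep));       /* seqStore-side view */
        for (int p = 0; p < nbPartitions; p++) {
            /* compress partition p: cRep follows the emitted sequences exactly,
             * while dRep is restored whenever a partition ends up raw/RLE */
        }
        /* the next block must start from the decoder's view of the history */
        memcpy(prevBlockRep, &dRep, sizeof(*prevBlockRep));
    }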
 
@@ -6617,21 +6617,22 @@ static U32 ZSTD_finalizeOffBase(U32 rawOffset, const U32 rep[ZSTD_REP_NUM], U32
 
 /* This function scans through an array of ZSTD_Sequence,
  * storing the sequences it reads, until it reaches a block delimiter.
- * Note that the block delimiter must include the last literals of the block.
+ * Note that the block delimiter includes the last literals of the block.
+ * @blockSize must be == sum(sequence_lengths).
  * @returns 0 on success, and a ZSTD_error otherwise.
  */
 static size_t
 ZSTD_transferSequences_wBlockDelim(ZSTD_CCtx* cctx,
-                                               ZSTD_SequencePosition* seqPos,
-                                         const ZSTD_Sequence* const inSeqs, size_t inSeqsSize,
-                                         const void* src, size_t blockSize,
-                                               ZSTD_paramSwitch_e externalRepSearch)
+                                   ZSTD_SequencePosition* seqPos,
+                             const ZSTD_Sequence* const inSeqs, size_t inSeqsSize,
+                             const void* src, size_t blockSize,
+                                   ZSTD_paramSwitch_e externalRepSearch)
 {
     U32 idx = seqPos->idx;
     U32 const startIdx = idx;
     BYTE const* ip = (BYTE const*)(src);
     const BYTE* const iend = ip + blockSize;
-    repcodes_t updatedRepcodes;
+    Repcodes_t updatedRepcodes;
     U32 dictSize;
 
     DEBUGLOG(5, "ZSTD_transferSequences_wBlockDelim (blockSize = %zu)", blockSize);
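
With explicit block delimiters, the final ZSTD_Sequence of a block has
matchLength == 0 and offset == 0 and carries the block's trailing literals in
litLength; the comment above also requires blockSize to equal the sum of the
sequence lengths. A small sketch of that invariant, assuming the public
ZSTD_Sequence from zstd.h:

    #include <stddef.h>
    #include <zstd.h>   /* public ZSTD_Sequence definition */

    /* Sum litLength + matchLength up to and including the block delimiter; for a
     * well-formed block this total must equal blockSize. Sketch only. */
    static size_t blockSizeFromSequences_sketch(const ZSTD_Sequence* seqs, size_t nbSeqs)
    {
        size_t total = 0;
        for (size_t i = 0; i < nbSeqs; i++) {
            total += seqs[i].litLength + seqs[i].matchLength;
            if (seqs[i].matchLength == 0 && seqs[i].offset == 0)
                break;   /* block delimiter: last literals of the block */
        }
        return total;
    }
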
@@ -6643,7 +6644,7 @@ ZSTD_transferSequences_wBlockDelim(ZSTD_CCtx* cctx,
     } else {
         dictSize = 0;
     }
-    ZSTD_memcpy(updatedRepcodes.rep, cctx->blockState.prevCBlock->rep, sizeof(repcodes_t));
+    ZSTD_memcpy(updatedRepcodes.rep, cctx->blockState.prevCBlock->rep, sizeof(Repcodes_t));
     for (; idx < inSeqsSize && (inSeqs[idx].matchLength != 0 || inSeqs[idx].offset != 0); ++idx) {
         U32 const litLength = inSeqs[idx].litLength;
         U32 const matchLength = inSeqs[idx].matchLength;
@@ -6695,7 +6696,7 @@ ZSTD_transferSequences_wBlockDelim(ZSTD_CCtx* cctx,
         }
     }
 
-    ZSTD_memcpy(cctx->blockState.nextCBlock->rep, updatedRepcodes.rep, sizeof(repcodes_t));
+    ZSTD_memcpy(cctx->blockState.nextCBlock->rep, updatedRepcodes.rep, sizeof(Repcodes_t));
 
     if (inSeqs[idx].litLength) {
         DEBUGLOG(6, "Storing last literals of size: %u", inSeqs[idx].litLength);
@@ -6708,24 +6709,25 @@ ZSTD_transferSequences_wBlockDelim(ZSTD_CCtx* cctx,
     return 0;
 }
 
-/* Returns the number of bytes to move the current read position back by.
- * Only non-zero if we ended up splitting a sequence.
- * Otherwise, it may return a ZSTD error if something went wrong.
- *
- * This function will attempt to scan through blockSize bytes
+/*
+ * This function attempts to scan through blockSize bytes
  * represented by the sequences in @inSeqs,
  * storing any (partial) sequences.
  *
+ * @returns the number of bytes to move the current read position back by.
+ * Only non-zero if we ended up splitting a sequence.
+ * Otherwise, it may return a ZSTD error if something went wrong.
+ *
  * Occasionally, we may want to change the actual number of bytes we consumed from inSeqs to
  * avoid splitting a match, or to avoid splitting a match such that it would produce a match
  * smaller than MINMATCH. In this case, we return the number of bytes that we didn't read from this block.
  */
 static size_t
-ZSTD_copySequencesToSeqStoreNoBlockDelim(ZSTD_CCtx* cctx,
-                                         ZSTD_SequencePosition* seqPos,
-                                   const ZSTD_Sequence* const inSeqs, size_t inSeqsSize,
-                                   const void* src, size_t blockSize,
-                                         ZSTD_paramSwitch_e externalRepSearch)
+ZSTD_transferSequences_noDelim(ZSTD_CCtx* cctx,
+                               ZSTD_SequencePosition* seqPos,
+                         const ZSTD_Sequence* const inSeqs, size_t inSeqsSize,
+                         const void* src, size_t blockSize,
+                               ZSTD_paramSwitch_e externalRepSearch)
 {
     U32 idx = seqPos->idx;
     U32 startPosInSequence = seqPos->posInSequence;
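
The adjustment described in the rewritten comment kicks in when the nominal block
boundary lands inside a match. Illustrative only (the policy in the real
ZSTD_transferSequences_noDelim is more involved): one way such a "bytes not read"
value could be derived is

    #define MINMATCH 3   /* zstd's minimum match length */
    typedef unsigned U32;

    static U32 unreadBytes_sketch(U32 matchBytesInThisBlock, U32 matchLength)
    {
        U32 const matchBytesInNextBlock = matchLength - matchBytesInThisBlock;
        if (matchBytesInNextBlock == 0)
            return 0;                        /* no split at all */
        if (matchBytesInThisBlock < MINMATCH || matchBytesInNextBlock < MINMATCH)
            return matchBytesInThisBlock;    /* push the whole match into the next block */
        return 0;                            /* splitting here leaves two valid matches */
    }
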
@@ -6733,7 +6735,7 @@ ZSTD_copySequencesToSeqStoreNoBlockDelim(ZSTD_CCtx* cctx,
     size_t dictSize;
     BYTE const* ip = (BYTE const*)(src);
     BYTE const* iend = ip + blockSize;  /* May be adjusted if we decide to process fewer than blockSize bytes */
-    repcodes_t updatedRepcodes;
+    Repcodes_t updatedRepcodes;
     U32 bytesAdjustment = 0;
     U32 finalMatchSplit = 0;
 
@@ -6747,9 +6749,9 @@ ZSTD_copySequencesToSeqStoreNoBlockDelim(ZSTD_CCtx* cctx,
     } else {
         dictSize = 0;
     }
-    DEBUGLOG(5, "ZSTD_copySequencesToSeqStoreNoBlockDelim: idx: %u PIS: %u blockSize: %zu", idx, startPosInSequence, blockSize);
+    DEBUGLOG(5, "ZSTD_transferSequences_noDelim: idx: %u PIS: %u blockSize: %zu", idx, startPosInSequence, blockSize);
     DEBUGLOG(5, "Start seq: idx: %u (of: %u ml: %u ll: %u)", idx, inSeqs[idx].offset, inSeqs[idx].matchLength, inSeqs[idx].litLength);
-    ZSTD_memcpy(updatedRepcodes.rep, cctx->blockState.prevCBlock->rep, sizeof(repcodes_t));
+    ZSTD_memcpy(updatedRepcodes.rep, cctx->blockState.prevCBlock->rep, sizeof(Repcodes_t));
     while (endPosInSequence && idx < inSeqsSize && !finalMatchSplit) {
         const ZSTD_Sequence currSeq = inSeqs[idx];
         U32 litLength = currSeq.litLength;
@@ -6830,7 +6832,7 @@ ZSTD_copySequencesToSeqStoreNoBlockDelim(ZSTD_CCtx* cctx,
     assert(idx == inSeqsSize || endPosInSequence <= inSeqs[idx].litLength + inSeqs[idx].matchLength);
     seqPos->idx = idx;
     seqPos->posInSequence = endPosInSequence;
-    ZSTD_memcpy(cctx->blockState.nextCBlock->rep, updatedRepcodes.rep, sizeof(repcodes_t));
+    ZSTD_memcpy(cctx->blockState.nextCBlock->rep, updatedRepcodes.rep, sizeof(Repcodes_t));
 
     iend -= bytesAdjustment;
     if (ip != iend) {
@@ -6855,7 +6857,7 @@ static ZSTD_SequenceCopier_f ZSTD_selectSequenceCopier(ZSTD_SequenceFormat_e mod
         return ZSTD_transferSequences_wBlockDelim;
     }
     assert(mode == ZSTD_sf_noBlockDelimiters);
-    return ZSTD_copySequencesToSeqStoreNoBlockDelim;
+    return ZSTD_transferSequences_noDelim;
 }
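
ZSTD_selectSequenceCopier above is a plain dispatch on the delimiter mode; a reduced
sketch of the function-pointer pattern (types and signatures simplified, not the
internal ones):

    #include <stddef.h>

    typedef enum { sf_noBlockDelimiters, sf_explicitBlockDelimiters } SketchSequenceFormat_e;
    typedef size_t (*SketchSequenceCopier_f)(void* cctx, const void* inSeqs, size_t inSeqsSize);

    static SketchSequenceCopier_f selectSequenceCopier_sketch(SketchSequenceFormat_e mode,
                                                              SketchSequenceCopier_f wBlockDelim,
                                                              SketchSequenceCopier_f noDelim)
    {
        return (mode == sf_explicitBlockDelimiters) ? wBlockDelim : noDelim;
    }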
 
 /* Discover the size of next block by searching for the delimiter.
diff --git a/lib/compress/zstd_compress_internal.h b/lib/compress/zstd_compress_internal.h
index 91066a4be703ecde733704e6bd5ea69c69796808..2387d9c2869c7b97c1d877b607d070678c3f4b8f 100644
@@ -849,12 +849,12 @@ ZSTD_updateRep(U32 rep[ZSTD_REP_NUM], U32 const offBase, U32 const ll0)
 
 typedef struct repcodes_s {
     U32 rep[3];
-} repcodes_t;
+} Repcodes_t;
 
-MEM_STATIC repcodes_t
+MEM_STATIC Repcodes_t
 ZSTD_newRep(U32 const rep[ZSTD_REP_NUM], U32 const offBase, U32 const ll0)
 {
-    repcodes_t newReps;
+    Repcodes_t newReps;
     ZSTD_memcpy(&newReps, rep, sizeof(newReps));
     ZSTD_updateRep(newReps.rep, offBase, ll0);
     return newReps;
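
ZSTD_newRep above is a copy-update helper around ZSTD_updateRep: it returns a fresh
3-slot history instead of mutating in place. The update rule can be sketched as
follows (simplified: the litLength==0 special case handled by ZSTD_updateRep is
omitted):

    typedef unsigned U32;
    #define REP_NUM 3

    typedef struct { U32 rep[REP_NUM]; } SketchRepcodes_t;   /* stands in for Repcodes_t */

    /* Illustrative only. offBase follows the numbering scheme used throughout this
     * commit: 1-3 reference the history, 4+ encode real_offset + 3. */
    static SketchRepcodes_t newRep_sketch(const SketchRepcodes_t* prev, U32 offBase)
    {
        SketchRepcodes_t next = *prev;
        if (offBase > REP_NUM) {                  /* new real offset: shift history */
            next.rep[2] = prev->rep[1];
            next.rep[1] = prev->rep[0];
            next.rep[0] = offBase - REP_NUM;
        } else if (offBase > 1) {                 /* repcode 2 or 3: move it to the front */
            U32 const hit = prev->rep[offBase - 1];
            next.rep[2] = (offBase == 3) ? prev->rep[1] : prev->rep[2];
            next.rep[1] = prev->rep[0];
            next.rep[0] = hit;
        }                                         /* offBase == 1: history unchanged */
        return next;
    }
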
diff --git a/lib/compress/zstd_compress_superblock.c b/lib/compress/zstd_compress_superblock.c
index 672af2d32c9951f69f232daaddcc1b1449144f1e..6f57345be62c31ccda488ab36ae79d4d1e1cfa30 100644
@@ -648,7 +648,7 @@ static size_t ZSTD_compressSubBlock_multi(const SeqStore_t* seqStorePtr,
         /* We have to regenerate the repcodes because we've skipped some sequences */
         if (sp < send) {
             const SeqDef* seq;
-            repcodes_t rep;
+            Repcodes_t rep;
             ZSTD_memcpy(&rep, prevCBlock->rep, sizeof(rep));
             for (seq = sstart; seq < sp; ++seq) {
                 ZSTD_updateRep(rep.rep, seq->offBase, ZSTD_getSequenceLength(seqStorePtr, seq).litLength == 0);
diff --git a/lib/compress/zstd_opt.c b/lib/compress/zstd_opt.c
index 1aee5017be0ba1047b302351a9a27e815e41f4ad..8f3b4d10029216e3f2ef97b64bf53c395baa90f4 100644
@@ -1227,13 +1227,13 @@ ZSTD_compressBlock_opt_generic(ZSTD_MatchState_t* ms,
                           && (with1literal < opt[cur+1].price) ) {
                             /* update offset history - before it disappears */
                             U32 const prev = cur - prevMatch.mlen;
-                            repcodes_t const newReps = ZSTD_newRep(opt[prev].rep, prevMatch.off, opt[prev].litlen==0);
+                            Repcodes_t const newReps = ZSTD_newRep(opt[prev].rep, prevMatch.off, opt[prev].litlen==0);
                             assert(cur >= prevMatch.mlen);
                             DEBUGLOG(7, "==> match+1lit is cheaper (%.2f < %.2f) (hist:%u,%u,%u) !",
                                         ZSTD_fCost(with1literal), ZSTD_fCost(withMoreLiterals),
                                         newReps.rep[0], newReps.rep[1], newReps.rep[2] );
                             opt[cur+1] = prevMatch;  /* mlen & offbase */
-                            ZSTD_memcpy(opt[cur+1].rep, &newReps, sizeof(repcodes_t));
+                            ZSTD_memcpy(opt[cur+1].rep, &newReps, sizeof(Repcodes_t));
                             opt[cur+1].litlen = 1;
                             opt[cur+1].price = with1literal;
                             if (last_pos < cur+1) last_pos = cur+1;
@@ -1248,13 +1248,13 @@ ZSTD_compressBlock_opt_generic(ZSTD_MatchState_t* ms,
             /* Offset history is not updated during match comparison.
              * Do it here, now that the match is selected and confirmed.
              */
-            ZSTD_STATIC_ASSERT(sizeof(opt[cur].rep) == sizeof(repcodes_t));
+            ZSTD_STATIC_ASSERT(sizeof(opt[cur].rep) == sizeof(Repcodes_t));
             assert(cur >= opt[cur].mlen);
             if (opt[cur].litlen == 0) {
                 /* just finished a match => alter offset history */
                 U32 const prev = cur - opt[cur].mlen;
-                repcodes_t const newReps = ZSTD_newRep(opt[prev].rep, opt[cur].off, opt[prev].litlen==0);
-                ZSTD_memcpy(opt[cur].rep, &newReps, sizeof(repcodes_t));
+                Repcodes_t const newReps = ZSTD_newRep(opt[prev].rep, opt[cur].off, opt[prev].litlen==0);
+                ZSTD_memcpy(opt[cur].rep, &newReps, sizeof(Repcodes_t));
             }
 
             /* last match must start at a minimum distance of 8 from oend */
@@ -1353,10 +1353,10 @@ _shortestPath:   /* cur, last_pos, best_mlen, best_off have to be set */
         /* Update offset history */
         if (lastStretch.litlen == 0) {
             /* finishing on a match : update offset history */
-            repcodes_t const reps = ZSTD_newRep(opt[cur].rep, lastStretch.off, opt[cur].litlen==0);
-            ZSTD_memcpy(rep, &reps, sizeof(repcodes_t));
+            Repcodes_t const reps = ZSTD_newRep(opt[cur].rep, lastStretch.off, opt[cur].litlen==0);
+            ZSTD_memcpy(rep, &reps, sizeof(Repcodes_t));
         } else {
-            ZSTD_memcpy(rep, lastStretch.rep, sizeof(repcodes_t));
+            ZSTD_memcpy(rep, lastStretch.rep, sizeof(Repcodes_t));
             assert(cur >= lastStretch.litlen);
             cur -= lastStretch.litlen;
         }
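
The optimal parser above defers offset-history maintenance: candidate matches are
priced without touching the repcode history, and ZSTD_newRep is applied only once a
stretch is actually selected and confirmed. A reduced sketch of that commit step
(types simplified; newRepInto_sketch is a placeholder for ZSTD_newRep, whose real
update rule lives in the zstd_compress_internal.h hunk earlier in this commit):

    #include <string.h>

    typedef unsigned U32;
    typedef struct { U32 mlen, off, litlen; U32 rep[3]; } SketchOptPos_t;  /* loosely mirrors ZSTD_optimal_t */

    /* Placeholder: derive the new 3-slot history from the history at the match's
     * start and the offBase of the committed match (rule elided in this sketch). */
    static void newRepInto_sketch(U32 dst[3], const U32 prevRep[3], U32 offBase, U32 ll0)
    {
        (void)offBase; (void)ll0;
        memcpy(dst, prevRep, 3 * sizeof(U32));
    }

    /* Commit step: only positions that end a match (litlen == 0) get a refreshed
     * history, derived from the position where that match started. */
    static void commitMatchHistory_sketch(SketchOptPos_t* opt, U32 cur)
    {
        if (opt[cur].litlen == 0) {
            U32 const prev = cur - opt[cur].mlen;   /* start of the committed match */
            newRepInto_sketch(opt[cur].rep, opt[prev].rep, opt[cur].off, opt[prev].litlen == 0);
        }
    }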