castle/repeat: fix a number of bugs
author Justin Viiret <justin.viiret@intel.com>
Sun, 13 Mar 2016 22:28:17 +0000 (09:28 +1100)
committer Matthew Barr <matthew.barr@intel.com>
Wed, 20 Apr 2016 03:34:54 +0000 (13:34 +1000)
- Add fits_in_len_bytes assertions for packed stores; this corrects the
  assertion formerly on line 888 (see the first sketch after this list).

- In exclusive mode, don't overwrite packedCtrlSize with the max of the
  group; each repeat should know how many bytes it is using, even if
  they share the same stream state.

- Ensure that exclusive mode stream state is sized correctly; the second
  sketch after this list illustrates this item and the previous one.
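
A minimal standalone sketch of what the new assertions guard. Names here are
stand-ins: store_le is a simplified substitute for partial_store_u64a, and
plain stdint types replace the library's u64a/u32 typedefs; only the checks
mirror the patch.

    #include <assert.h>
    #include <stdint.h>
    #include <string.h>

    /* Mirrors the fits_in_len_bytes() helper added by this commit: a value
     * packed into "len" bytes must actually fit in len bytes. */
    static int fits_in_len_bytes(uint64_t val, uint32_t len) {
        if (len >= 8) {
            return 1; /* any 64-bit value fits in 8 or more bytes */
        }
        return val <= (1ULL << (len * 8));
    }

    /* Simplified little-endian partial store, standing in for
     * partial_store_u64a() for the purposes of this sketch. */
    static void store_le(char *dest, uint64_t val, uint32_t len) {
        assert(fits_in_len_bytes(val, len));
        memcpy(dest, &val, len); /* little-endian host assumed here */
    }

    /* Same shape as storePackedRelative(): pack (offset - val), clamped to
     * max, into len bytes. */
    static void store_packed_relative(char *dest, uint64_t val,
                                      uint64_t offset, uint64_t max,
                                      uint32_t len) {
        assert(val <= offset);
        assert(fits_in_len_bytes(max, len));
        uint64_t delta = offset - val;
        if (delta >= max) {
            delta = max; /* clamp keeps the stored delta within range */
        }
        assert(fits_in_len_bytes(delta, len));
        store_le(dest, delta, len);
    }

Because the delta is clamped to max before the store, the second assertion
can only fire if the cap itself does not fit in the packed width.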
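
A compressed sketch of the sizing logic described by the last two items,
using made-up stand-in types; the real change is in buildSubcastles() in
castlecompile.cpp and operates on RepeatStateInfo/RepeatInfo. Each repeat
records its own packedCtrlSize, while the shared exclusive stream slot must
hold the largest packedCtrlSize + stateSize in the group, not just the
largest control block.

    #include <stdint.h>

    struct repeat_sizes {
        uint32_t packedCtrlSize; /* bytes of packed control data */
        uint32_t stateSize;      /* bytes of additional repeat state */
    };

    /* Returns the stream-state bytes needed by a group of repeats sharing
     * one exclusive slot, and records each repeat's own packedCtrlSize. */
    static uint32_t size_exclusive_group(const struct repeat_sizes *group,
                                         uint32_t count,
                                         uint32_t *packed_ctrl_out) {
        uint32_t max_stream = 0;
        for (uint32_t i = 0; i < count; i++) {
            /* Before the fix, packedCtrlSize was overwritten with the group
             * maximum; now each repeat keeps its own value. */
            packed_ctrl_out[i] = group[i].packedCtrlSize;
            uint32_t stream = group[i].packedCtrlSize + group[i].stateSize;
            if (stream > max_stream) {
                max_stream = stream; /* slot must cover ctrl AND state */
            }
        }
        return max_stream;
    }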

src/nfa/castlecompile.cpp
src/nfa/repeat.c

diff --git a/src/nfa/castlecompile.cpp b/src/nfa/castlecompile.cpp
index 5e8b662af1e9fb6f08024da53931102878573ab4..d7312b855e3e9ac2d18f2be1abb48b6592652659 100644
@@ -361,25 +361,22 @@ void buildSubcastles(const CastleProto &proto, vector<SubCastle> &subs,
         DEBUG_PRINTF("sub %u: selected %s model for %s repeat\n", i,
                      repeatTypeName(rtype), pr.bounds.str().c_str());
 
-        u32 subScratchStateSize;
-        u32 subStreamStateSize;
-
         SubCastle &sub = subs[i];
         RepeatInfo &info = infos[i];
 
-        // handle exclusive case differently
+        info.packedCtrlSize = rsi.packedCtrlSize;
+        u32 subStreamStateSize = verify_u32(rsi.packedCtrlSize + rsi.stateSize);
+
+        // Handle stream/scratch space alloc for exclusive case differently.
         if (contains(groupId, i)) {
             u32 id = groupId.at(i);
-            maxStreamSize[id] = MAX(maxStreamSize[id], rsi.packedCtrlSize);
+            maxStreamSize[id] = max(maxStreamSize[id], subStreamStateSize);
+            // SubCastle full/stream state offsets are written in for the group
+            // below.
         } else {
-            subScratchStateSize = verify_u32(sizeof(RepeatControl));
-            subStreamStateSize = verify_u32(rsi.packedCtrlSize + rsi.stateSize);
-
-            info.packedCtrlSize = rsi.packedCtrlSize;
             sub.fullStateOffset = scratchStateSize;
             sub.streamStateOffset = streamStateSize;
-
-            scratchStateSize += subScratchStateSize;
+            scratchStateSize += verify_u32(sizeof(RepeatControl));
             streamStateSize += subStreamStateSize;
         }
 
@@ -420,8 +417,6 @@ void buildSubcastles(const CastleProto &proto, vector<SubCastle> &subs,
         u32 top = j.first;
         u32 id = j.second;
         SubCastle &sub = subs[top];
-        RepeatInfo &info = infos[top];
-        info.packedCtrlSize = maxStreamSize[id];
         if (!scratchOffset[id]) {
             sub.fullStateOffset = scratchStateSize;
             sub.streamStateOffset = streamStateSize;
diff --git a/src/nfa/repeat.c b/src/nfa/repeat.c
index d12bc5a1d5fc227a59682e20708629d95f53651d..339829a52742edc01b16610504c69c894c896a09 100644
@@ -1,5 +1,5 @@
 /*
- * Copyright (c) 2015, Intel Corporation
+ * Copyright (c) 2015-2016, Intel Corporation
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions are met:
@@ -882,15 +882,25 @@ enum RepeatMatch repeatHasMatchTrailer(const struct RepeatInfo *info,
     return REPEAT_NOMATCH;
 }
 
+/** \brief True if the given value can be packed into len bytes.  */
+static really_inline
+int fits_in_len_bytes(u64a val, u32 len) {
+    if (len >= 8) {
+        return 1;
+    }
+    return val <= (1ULL << (len * 8));
+}
+
 static really_inline
 void storePackedRelative(char *dest, u64a val, u64a offset, u64a max, u32 len) {
     assert(val <= offset);
-    assert(max < (1ULL << (8 * len)));
+    assert(fits_in_len_bytes(max, len));
     u64a delta = offset - val;
     if (delta >= max) {
         delta = max;
     }
     DEBUG_PRINTF("delta %llu\n", delta);
+    assert(fits_in_len_bytes(delta, len));
     partial_store_u64a(dest, delta, len);
 }
 
@@ -967,6 +977,7 @@ void repeatPackBitmap(char *dest, const struct RepeatInfo *info,
     DEBUG_PRINTF("packing %llu into %u bytes\n", bitmap, info->packedCtrlSize);
 
     // Write out packed bitmap.
+    assert(fits_in_len_bytes(bitmap, info->packedCtrlSize));
     partial_store_u64a(dest, bitmap, info->packedCtrlSize);
 }
 
@@ -1440,6 +1451,7 @@ void repeatStoreSparseOptimalP(const struct RepeatInfo *info,
     DEBUG_PRINTF("xs->first:%u xs->last:%u patch:%u\n",
                  xs->first, xs->last, patch);
     DEBUG_PRINTF("value:%llu\n", val);
+    assert(fits_in_len_bytes(val, encoding_size));
     partial_store_u64a(ring + encoding_size * idx, val, encoding_size);
     mmbit_set(active, patch_count, idx);
 }