return true;
}
-aligned_unique_ptr<HWLM> hwlmBuild(const vector<hwlmLiteral> &lits,
- bool make_small, const CompileContext &cc,
- UNUSED hwlm_group_t expected_groups) {
+bytecode_ptr<HWLM> hwlmBuild(const vector<hwlmLiteral> &lits, bool make_small,
+ const CompileContext &cc,
+ UNUSED hwlm_group_t expected_groups) {
assert(!lits.empty());
dumpLits(lits);
engType = HWLM_ENGINE_FDR;
auto fdr = fdrBuildTable(lits, make_small, cc.target_info, cc.grey);
if (fdr) {
- engSize = fdrSize(fdr.get());
+ engSize = fdr.size();
}
eng = move(fdr);
}
throw ResourceLimitError();
}
- auto h = aligned_zmalloc_unique<HWLM>(ROUNDUP_CL(sizeof(HWLM)) + engSize);
+ auto h = make_zeroed_bytecode_ptr<HWLM>(ROUNDUP_CL(sizeof(HWLM)) + engSize, 64);
h->type = engType;
memcpy(HWLM_DATA(h.get()), eng.get(), engSize);
#include "hwlm.h"
#include "ue2common.h"
-#include "util/alloc.h"
+#include "util/bytecode_ptr.h"
#include <memory>
#include <vector>
* may result in a nullptr return value, or a std::bad_alloc exception being
* thrown.
*/
-aligned_unique_ptr<HWLM>
-hwlmBuild(const std::vector<hwlmLiteral> &lits, bool make_small,
- const CompileContext &cc,
- hwlm_group_t expected_groups = HWLM_ALL_GROUPS);
+bytecode_ptr<HWLM> hwlmBuild(const std::vector<hwlmLiteral> &lits,
+ bool make_small, const CompileContext &cc,
+ hwlm_group_t expected_groups = HWLM_ALL_GROUPS);
/**
* Returns an estimate of the number of repeated characters on the end of a
// Build floating HWLM matcher.
rose_group fgroups = 0;
- size_t fsize = 0;
auto ftable = buildFloatingMatcher(*this, fragments,
- bc.longLitLengthThreshold,
- &fgroups, &fsize, &historyRequired);
+ bc.longLitLengthThreshold, &fgroups, &historyRequired);
if (ftable) {
- proto.fmatcherOffset = bc.engine_blob.add(ftable.get(), fsize, 64);
+ proto.fmatcherOffset = bc.engine_blob.add(ftable);
bc.resources.has_floating = true;
}
// Build delay rebuild HWLM matcher.
- size_t drsize = 0;
auto drtable = buildDelayRebuildMatcher(*this, fragments,
- bc.longLitLengthThreshold, &drsize);
+ bc.longLitLengthThreshold);
if (drtable) {
- proto.drmatcherOffset = bc.engine_blob.add(drtable.get(), drsize, 64);
+ proto.drmatcherOffset = bc.engine_blob.add(drtable);
}
// Build EOD-anchored HWLM matcher.
- size_t esize = 0;
- auto etable = buildEodAnchoredMatcher(*this, fragments, &esize);
+ auto etable = buildEodAnchoredMatcher(*this, fragments);
if (etable) {
- proto.ematcherOffset = bc.engine_blob.add(etable.get(), esize, 64);
+ proto.ematcherOffset = bc.engine_blob.add(etable);
}
// Build small-block HWLM matcher.
- size_t sbsize = 0;
- auto sbtable = buildSmallBlockMatcher(*this, fragments, &sbsize);
+ auto sbtable = buildSmallBlockMatcher(*this, fragments);
if (sbtable) {
- proto.sbmatcherOffset = bc.engine_blob.add(sbtable.get(), sbsize, 64);
+ proto.sbmatcherOffset = bc.engine_blob.add(sbtable);
}
proto.activeArrayCount = proto.leftfixBeginQueue;
buildForwardAccel(&hwlm, mp.accel_lits, build.getInitialGroups());
}
-aligned_unique_ptr<HWLM>
-buildFloatingMatcher(const RoseBuildImpl &build,
- const vector<LitFragment> &fragments,
- size_t longLitLengthThreshold, rose_group *fgroups,
- size_t *fsize, size_t *historyRequired) {
- *fsize = 0;
+bytecode_ptr<HWLM> buildFloatingMatcher(const RoseBuildImpl &build,
+ const vector<LitFragment> &fragments,
+ size_t longLitLengthThreshold,
+ rose_group *fgroups,
+ size_t *historyRequired) {
*fgroups = 0;
auto mp = makeMatcherProto(build, fragments, ROSE_FLOATING, false,
*historyRequired = max(*historyRequired, mp.history_required);
}
- *fsize = hwlmSize(hwlm.get());
- assert(*fsize);
- DEBUG_PRINTF("built floating literal table size %zu bytes\n", *fsize);
+ DEBUG_PRINTF("built floating literal table size %zu bytes\n", hwlm.size());
return hwlm;
}
-aligned_unique_ptr<HWLM> buildDelayRebuildMatcher(const RoseBuildImpl &build,
- const vector<LitFragment> &fragments,
- size_t longLitLengthThreshold,
- size_t *drsize) {
- *drsize = 0;
-
+bytecode_ptr<HWLM>
+buildDelayRebuildMatcher(const RoseBuildImpl &build,
+ const vector<LitFragment> &fragments,
+ size_t longLitLengthThreshold) {
if (!build.cc.streaming) {
DEBUG_PRINTF("not streaming\n");
return nullptr;
buildAccel(build, mp, *hwlm);
- *drsize = hwlmSize(hwlm.get());
- assert(*drsize);
- DEBUG_PRINTF("built delay rebuild table size %zu bytes\n", *drsize);
+ DEBUG_PRINTF("built delay rebuild table size %zu bytes\n", hwlm.size());
return hwlm;
}
-aligned_unique_ptr<HWLM>
+bytecode_ptr<HWLM>
buildSmallBlockMatcher(const RoseBuildImpl &build,
- const vector<LitFragment> &fragments, size_t *sbsize) {
- *sbsize = 0;
-
+ const vector<LitFragment> &fragments) {
if (build.cc.streaming) {
DEBUG_PRINTF("streaming mode\n");
return nullptr;
buildAccel(build, mp, *hwlm);
- *sbsize = hwlmSize(hwlm.get());
- assert(*sbsize);
- DEBUG_PRINTF("built small block literal table size %zu bytes\n", *sbsize);
+ DEBUG_PRINTF("built small block literal table size %zu bytes\n",
+ hwlm.size());
return hwlm;
}
-aligned_unique_ptr<HWLM>
+bytecode_ptr<HWLM>
buildEodAnchoredMatcher(const RoseBuildImpl &build,
- const vector<LitFragment> &fragments, size_t *esize) {
- *esize = 0;
-
+ const vector<LitFragment> &fragments) {
auto mp = makeMatcherProto(build, fragments, ROSE_EOD_ANCHORED, false,
build.ematcher_region_size);
buildAccel(build, mp, *hwlm);
- *esize = hwlmSize(hwlm.get());
- assert(*esize);
- DEBUG_PRINTF("built eod-anchored literal table size %zu bytes\n", *esize);
+ DEBUG_PRINTF("built eod-anchored literal table size %zu bytes\n",
+ hwlm.size());
return hwlm;
}
#define ROSE_BUILD_MATCHERS_H
#include "rose_build_impl.h"
+#include "util/bytecode_ptr.h"
#include <vector>
u32 delay_program_offset = ROSE_INVALID_PROG_OFFSET;
};
-aligned_unique_ptr<HWLM> buildFloatingMatcher(const RoseBuildImpl &build,
- const std::vector<LitFragment> &fragments,
- size_t longLitLengthThreshold,
- rose_group *fgroups,
- size_t *fsize,
- size_t *historyRequired);
+bytecode_ptr<HWLM>
+buildFloatingMatcher(const RoseBuildImpl &build,
+ const std::vector<LitFragment> &fragments,
+ size_t longLitLengthThreshold, rose_group *fgroups,
+ size_t *historyRequired);
-aligned_unique_ptr<HWLM> buildDelayRebuildMatcher(const RoseBuildImpl &build,
- const std::vector<LitFragment> &fragments,
- size_t longLitLengthThreshold,
- size_t *drsize);
+bytecode_ptr<HWLM>
+buildDelayRebuildMatcher(const RoseBuildImpl &build,
+ const std::vector<LitFragment> &fragments,
+ size_t longLitLengthThreshold);
-aligned_unique_ptr<HWLM> buildSmallBlockMatcher(const RoseBuildImpl &build,
- const std::vector<LitFragment> &fragments,
- size_t *sbsize);
+bytecode_ptr<HWLM>
+buildSmallBlockMatcher(const RoseBuildImpl &build,
+ const std::vector<LitFragment> &fragments);
-aligned_unique_ptr<HWLM> buildEodAnchoredMatcher(const RoseBuildImpl &build,
- const std::vector<LitFragment> &fragments,
- size_t *esize);
+bytecode_ptr<HWLM>
+buildEodAnchoredMatcher(const RoseBuildImpl &build,
+ const std::vector<LitFragment> &fragments);
void findMoreLiteralMasks(RoseBuildImpl &build);