struct RoseContext *tctxt = &scratch->tctxt;
tctxt->t = t;
- tctxt->depth = 1;
tctxt->groups = t->initialGroups;
tctxt->lit_offset_adjust = 1; // index after last byte
tctxt->delayLastEndOffset = 0;
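
A hedged aside on the groups mask these hunks manipulate: rose_group is a 64-bit mask with one bit per literal group. Enabling groups is a plain OR (as in the `tctxt->groups |= ri->groups` hunk further down), and squashing, as roseSquashGroup does, is an AND-NOT. The helpers below are illustrative stand-ins, not Hyperscan's API:

    typedef unsigned long long u64a;
    typedef u64a rose_group;

    /* Illustrative only: one bit of the mask per literal group. */
    static rose_group example_enable_groups(rose_group cur, rose_group mask) {
        return cur | mask;   /* cf. "tctxt->groups |= ri->groups" */
    }

    static rose_group example_squash_groups(rose_group cur, rose_group mask) {
        return cur & ~mask;  /* what roseSquashGroup conceptually does */
    }
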
void initContext(const struct RoseEngine *t, u8 *state, u64a offset,
struct hs_scratch *scratch, RoseCallback callback,
RoseCallbackSom som_callback, void *ctx) {
- struct RoseRuntimeState *rstate = getRuntimeState(state);
struct RoseContext *tctxt = &scratch->tctxt;
tctxt->t = t;
- tctxt->depth = rstate->stored_depth;
tctxt->groups = loadGroups(t, state); /* TODO: diff groups for eod */
tctxt->lit_offset_adjust = scratch->core_info.buf_offset
                           - scratch->core_info.hlen
static really_inline
void init_rstate(const struct RoseEngine *t, u8 *state) {
- // Set runtime state: initial depth is 1 and we take our initial groups
- // from the RoseEngine.
+ // Set runtime state: we take our initial groups from the RoseEngine.
DEBUG_PRINTF("setting initial groups to 0x%016llx\n", t->initialGroups);
struct RoseRuntimeState *rstate = getRuntimeState(state);
- rstate->stored_depth = 1;
storeGroups(t, state, t->initialGroups);
rstate->flags = 0;
rstate->broken = NOT_BROKEN;
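
With depth gone, init_rstate persists only the group mask (via storeGroups) plus the flags and broken bytes. As a sketch of what loadGroups/storeGroups do, under the assumption that the mask lives at some engine-defined offset in the byte-addressed state (the offset below is hypothetical; the real engine derives offset and width from the RoseEngine):

    #include <string.h>

    typedef unsigned char u8;
    typedef unsigned long long u64a;

    /* Hypothetical offset, for illustration only. */
    #define EXAMPLE_GROUPS_OFFSET 0

    static u64a example_load_groups(const u8 *state) {
        u64a groups;
        memcpy(&groups, state + EXAMPLE_GROUPS_OFFSET, sizeof(groups));
        return groups;
    }

    static void example_store_groups(u8 *state, u64a groups) {
        memcpy(state + EXAMPLE_GROUPS_OFFSET, &groups, sizeof(groups));
    }
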
printf("\n");
#endif
- DEBUG_PRINTF("STATE depth=%u, groups=0x%016llx\n", tctx->depth,
- tctx->groups);
+ DEBUG_PRINTF("STATE groups=0x%016llx\n", tctx->groups);
if (isLiteralDR(id)) {
return tctx->groups;
pushDelayedMatches(tl, real_end, tctx);
- /* we are just repopulating the delay queue, groups and depths should be
- * already set from the original scan. */
+ /* We are just repopulating the delay queue; groups should already be
+ * set from the original scan. */
return tctx->groups;
u64a real_end = ci->buf_offset + end; // index after last byte
DEBUG_PRINTF("MATCH id=%u offsets=[???,%llu]\n", id, real_end);
- DEBUG_PRINTF("STATE depth=%u, groups=0x%016llx\n", tctxt->depth,
- tctxt->groups);
+ DEBUG_PRINTF("STATE groups=0x%016llx\n", tctxt->groups);
if (can_stop_matching(tctxtToScratch(tctxt))) {
DEBUG_PRINTF("received a match when we're already dead!\n");
roseSquashGroup(tctxt, tl);
}
- DEBUG_PRINTF("DONE depth=%u, groups=0x%016llx\n", tctxt->depth,
- tctxt->groups);
+ DEBUG_PRINTF("DONE groups=0x%016llx\n", tctxt->groups);
if (real_end > t->floatingMinLiteralMatchOffset) {
recordAnchoredLiteralMatch(tctxt, id, real_end);
DEBUG_PRINTF("DELAYED MATCH id=%u offset=%llu\n", literal_id, offset);
hwlmcb_rv_t rv = roseProcessDelayedMatch(tctxt->t, offset, literal_id,
tctxt);
- DEBUG_PRINTF("DONE depth=%u, groups=0x%016llx\n", tctxt->depth,
- tctxt->groups);
+ DEBUG_PRINTF("DONE groups=0x%016llx\n", tctxt->groups);
/* delayed literals can't safely set groups.
* However we may be setting groups that successors already have
curr_loc);
hwlmcb_rv_t rv = roseProcessDelayedAnchoredMatch(tctxt->t, curr_loc,
literal_id, tctxt);
- DEBUG_PRINTF("DONE depth=%u, groups=0x%016llx\n", tctxt->depth,
- tctxt->groups);
+ DEBUG_PRINTF("DONE groups=0x%016llx\n", tctxt->groups);
/* anchored literals can't safely set groups.
* However we may be setting groups that successors already
#endif
DEBUG_PRINTF("last end %llu\n", tctx->lastEndOffset);
- DEBUG_PRINTF("STATE depth=%u, groups=0x%016llx\n", tctx->depth,
- tctx->groups);
+ DEBUG_PRINTF("STATE groups=0x%016llx\n", tctx->groups);
if (can_stop_matching(tctxtToScratch(tctx))) {
DEBUG_PRINTF("received a match when we're already dead!\n");
rv = roseProcessMainMatch(tctx->t, real_end, id, tctx);
- DEBUG_PRINTF("DONE depth=%hhu, groups=0x%016llx\n", tctx->depth,
- tctx->groups);
+ DEBUG_PRINTF("DONE groups=0x%016llx\n", tctx->groups);
if (rv != HWLM_TERMINATE_MATCHING) {
return tctx->groups;
return HWLM_CONTINUE_MATCHING;
}
-static really_inline
-void update_depth(struct RoseContext *tctxt, u8 depth) {
- u8 d = MAX(tctxt->depth, depth + 1);
- assert(d >= tctxt->depth);
- DEBUG_PRINTF("depth now %hhu was %hhu\n", d, tctxt->depth);
- tctxt->depth = d;
-}
-
static rose_inline
void roseFlushLastByteHistory(const struct RoseEngine *t, u8 *state,
u64a currEnd, struct RoseContext *tctxt) {
}
}
-static rose_inline
-void roseSetRole(const struct RoseEngine *t, u8 *state,
- struct RoseContext *tctxt, u32 stateIndex, u8 depth) {
- DEBUG_PRINTF("state idx=%u, depth=%u\n", stateIndex, depth);
- mmbit_set(getRoleState(state), t->rolesWithStateCount, stateIndex);
- update_depth(tctxt, depth);
-}
-
static rose_inline
void roseTriggerInfix(const struct RoseEngine *t, u64a start, u64a end, u32 qi,
u32 topEvent, u8 cancel, struct RoseContext *tctxt) {
PROGRAM_CASE(ANCHORED_DELAY) {
if (in_anchored && end > t->floatingMinLiteralMatchOffset) {
DEBUG_PRINTF("delay until playback\n");
- update_depth(tctxt, ri->depth);
tctxt->groups |= ri->groups;
*work_done = 1;
assert(ri->done_jump); // must progress
PROGRAM_NEXT_INSTRUCTION
PROGRAM_CASE(SET_STATE) {
- roseSetRole(t, tctxt->state, tctxt, ri->index, ri->depth);
+ DEBUG_PRINTF("set state index %u\n", ri->index);
+ mmbit_set(getRoleState(tctxt->state), t->rolesWithStateCount,
+ ri->index);
*work_done = 1;
}
PROGRAM_NEXT_INSTRUCTION
}
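
With roseSetRole folded away, SET_STATE reduces to marking the role live in the role multibit. A simplified flat-bitvector stand-in for mmbit_set (the real multibit adds a multi-level summary structure for large bit counts, but the effect here is setting one bit):

    #include <assert.h>

    typedef unsigned char u8;
    typedef unsigned int u32;

    /* Simplified stand-in for mmbit_set(): set bit `key` in a flat
     * bitvector of total_bits bits. */
    static void example_bit_set(u8 *bits, u32 total_bits, u32 key) {
        assert(key < total_bits);
        bits[key / 8] |= (u8)(1U << (key % 8));
    }
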
static
-void makeRoleAnchoredDelay(RoseBuildImpl &build, build_context &bc,
+void makeRoleAnchoredDelay(RoseBuildImpl &build, UNUSED build_context &bc,
RoseVertex v, vector<RoseInstruction> &program) {
// Only relevant for roles that can be triggered by the anchored table.
if (!build.isAnchored(v)) {
// floatingMinLiteralMatchOffset.
auto ri = RoseInstruction(ROSE_INSTR_ANCHORED_DELAY);
- ri.u.anchoredDelay.depth = (u8)min(254U, bc.depths.at(v));
ri.u.anchoredDelay.groups = build.g[v].groups;
program.push_back(ri);
}
u32 idx = it->second;
auto ri = RoseInstruction(ROSE_INSTR_SET_STATE);
ri.u.setState.index = idx;
- ri.u.setState.depth = (u8)min(254U, bc.depths.at(v));
program.push_back(ri);
}
aligned_unique_ptr<HWLM> sbtable = buildSmallBlockMatcher(*this, &sbsize);
build_context bc;
- bc.depths = findDepths(*this);
// Build NFAs
set<u32> no_retrigger_queues;
os << "ROSE LITERALS: a total of " << build.literals.right.size()
<< " literals and " << num_vertices(g) << " roles." << endl << endl;
- const auto depths = findDepths(build);
-
for (const auto &e : build.literals.right) {
u32 id = e.first;
const ue2_literal &s = e.second.s;
for (RoseVertex v : verts) {
// role info
- os << " Index " << g[v].idx << ": depth=" << depths.at(v)
- << ", groups=0x" << hex << setw(16) << setfill('0')
- << g[v].groups << dec;
+ os << " Index " << g[v].idx << ": groups=0x" << hex << setw(16)
+ << setfill('0') << g[v].groups << dec;
if (g[v].reports.empty()) {
os << ", report=NONE";
void fillHamsterLiteralList(const RoseBuildImpl &tbi, rose_literal_table table,
std::vector<hwlmLiteral> *hl);
-// Find the minimum depth in hops of each role. Note that a role may be
-// accessible from both the root and the anchored root.
-std::map<RoseVertex, u32> findDepths(const RoseBuildImpl &build);
-
#ifndef NDEBUG
bool canImplementGraphs(const RoseBuildImpl &tbi);
#endif
return graph || castle || dfa || haig;
}
-// Find the minimum depth in hops of each role. Note that a role may be
-// accessible from both the root and the anchored root.
-map<RoseVertex, u32> findDepths(const RoseBuildImpl &build) {
- const RoseGraph &g = build.g;
- map<RoseVertex, u32> depths;
-
- depths[build.root] = 0;
- depths[build.anchored_root] = 0;
-
- // BFS from root first.
- breadth_first_search(g, build.root, visitor(make_bfs_visitor(
- record_distances(boost::make_assoc_property_map(depths),
- boost::on_tree_edge()))).
- vertex_index_map(get(&RoseVertexProps::idx, g)));
-
- // BFS from anchored root, updating depths in the graph when they get
- // smaller.
- map<RoseVertex, u32> depthsAnch;
- breadth_first_search(g, build.anchored_root, visitor(make_bfs_visitor(
- record_distances(boost::make_assoc_property_map(depthsAnch),
- boost::on_tree_edge()))).
- vertex_index_map(get(&RoseVertexProps::idx, g)));
- for (const auto &e : depthsAnch) {
- if (contains(depths, e.first)) {
- LIMIT_TO_AT_MOST(&depths[e.first], e.second);
- } else {
- depths.insert(e);
- }
- }
-
- return depths;
-}
-
u32 roseQuality(const RoseEngine *t) {
/* Rose is low quality if the atable is a Mcclellan 16 or has multiple DFAs
*/
assert(code <= ROSE_INSTR_END);
switch (code) {
PROGRAM_CASE(ANCHORED_DELAY) {
- os << " depth " << u32{ri->depth} << endl;
os << " groups 0x" << std::hex << ri->groups << std::dec
<< endl;
os << " done_jump +" << ri->done_jump << endl;
PROGRAM_NEXT_INSTRUCTION
PROGRAM_CASE(SET_STATE) {
- os << " depth " << u32{ri->depth} << endl;
os << " index " << ri->index << endl;
}
PROGRAM_NEXT_INSTRUCTION
#endif
// Rose runtime state
struct RoseRuntimeState {
- u8 stored_depth; /* depth at stream boundary */
u8 flags; /* high bit true if delay rebuild needed */
u8 broken; /* user has requested that we stop matching */
#if defined(_WIN32)
struct ROSE_STRUCT_ANCHORED_DELAY {
u8 code; //!< From enum RoseInstructionCode.
- u8 depth; //!< Depth for this state.
rose_group groups; //!< Bitmask.
u32 done_jump; //!< Jump forward this many bytes if successful.
};
struct ROSE_STRUCT_SET_STATE {
u8 code; //!< From enum RoseInstructionCode.
- u8 depth; //!< Depth for this state.
u32 index; //!< State index in multibit.
};
tctxt->lastEndOffset = offset + length;
storeGroups(t, state, tctxt->groups);
struct RoseRuntimeState *rstate = getRuntimeState(state);
- rstate->stored_depth = tctxt->depth;
rstate->flags = delay_rb_status;
}
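
With stored_depth removed, the only per-stream Rose context that must round-trip the boundary here is the group mask, alongside the flag bytes. A sketch of that pairing, reusing the hypothetical example_store_groups/example_load_groups helpers from the init_rstate note above (not Hyperscan's own helpers):

    #include <assert.h>

    /* Suspend writes the live mask into stream state; the next stream
     * write reloads it. Nothing else in RoseContext needs restoring. */
    static void example_boundary_round_trip(u8 *state, u64a live_groups) {
        example_store_groups(state, live_groups);          /* suspend */
        assert(example_load_groups(state) == live_groups); /* resume  */
    }
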
struct RoseContext *tctxt = &scratch->tctxt;
tctxt->t = t;
- tctxt->depth = rstate->stored_depth;
tctxt->mpv_inactive = 0;
tctxt->groups = loadGroups(t, state);
tctxt->lit_offset_adjust = offset + 1; // index after last byte
struct RoseContext {
const struct RoseEngine *t;
u8 *state; /**< base pointer to the full state */
- u8 depth;
u8 mpv_inactive;
u64a groups;
u64a lit_offset_adjust; /**< offset to add to matches coming from hwlm */