Datasets that hit the memcap limit need to be discarded; otherwise the
datasets remain loaded with partial data while the signature itself is
not loaded due to the memcap error.
Ticket: #6678
(cherry picked from commit
1f9600e487173b785de186184d93633b246425fd)
break;
}
+ if (set->hash && SC_ATOMIC_GET(set->hash->memcap_reached)) {
+ SCLogError("dataset too large for set memcap");
+ goto out_err;
+ }
+
SCLogDebug("set %p/%s type %u save %s load %s",
set, set->name, set->type, set->save, set->load);
SCLogError("failed to set up dataset '%s'.", name);
return -1;
}
- if (set->hash && SC_ATOMIC_GET(set->hash->memcap_reached)) {
- SCLogError("dataset too large for set memcap");
- return -1;
- }
cd = SCCalloc(1, sizeof(DetectDatasetData));
if (unlikely(cd == NULL))