    hs_error_t err;
    const size_t datalen = 1024 * 1024;
    size_t megabytes = 5 * 1024;
-   char * data = new char[datalen];
-   memset(data, 'X', datalen);
+   vector<char> data(datalen, 'X');
    // build a database
    hs_database_t *db = nullptr;
    ASSERT_TRUE(stream != nullptr);
    while (megabytes-- > 0) {
-       err = hs_scan_stream(stream, data, datalen, 0, scratch, dummyHandler,
-                            nullptr);
+       err = hs_scan_stream(stream, data.data(), data.size(), 0, scratch,
+                            dummyHandler, nullptr);
        ASSERT_EQ(HS_SUCCESS, err);
    }
    // teardown
    hs_free_scratch(scratch);
    hs_free_database(db);
-   delete [] data;
}
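
For reference outside the test fixture, the streaming pattern exercised above looks roughly like the standalone sketch below. The pattern string "foobar", the block count, and the no-op dummyHandler are illustrative assumptions; the library calls (hs_compile, hs_alloc_scratch, hs_open_stream, hs_scan_stream, hs_close_stream) are Hyperscan's public streaming API, and the buffer is a vector<char> as in the updated test.

#include <cstddef>
#include <cstdio>
#include <vector>

#include <hs/hs.h> // or <hs.h>, depending on how Hyperscan is installed

using std::vector;

// No-op match callback; returning 0 tells Hyperscan to keep scanning.
static int dummyHandler(unsigned int, unsigned long long, unsigned long long,
                        unsigned int, void *) {
    return 0;
}

int main() {
    hs_database_t *db = nullptr;
    hs_compile_error_t *compile_err = nullptr;
    if (hs_compile("foobar", 0, HS_MODE_STREAM, nullptr, &db,
                   &compile_err) != HS_SUCCESS) {
        std::printf("compile failed: %s\n", compile_err->message);
        hs_free_compile_error(compile_err);
        return 1;
    }

    hs_scratch_t *scratch = nullptr;
    if (hs_alloc_scratch(db, &scratch) != HS_SUCCESS) {
        hs_free_database(db);
        return 1;
    }

    hs_stream_t *stream = nullptr;
    if (hs_open_stream(db, 0, &stream) != HS_SUCCESS) {
        hs_free_scratch(scratch);
        hs_free_database(db);
        return 1;
    }

    // One megabyte of filler owned by the vector instead of a raw new[].
    const size_t datalen = 1024 * 1024;
    vector<char> data(datalen, 'X');

    // Feed the same block repeatedly; streaming mode carries match state
    // across calls, so patterns may match across block boundaries.
    for (int i = 0; i < 8; ++i) {
        hs_error_t err = hs_scan_stream(stream, data.data(), data.size(), 0,
                                        scratch, dummyHandler, nullptr);
        if (err != HS_SUCCESS) {
            break;
        }
    }

    hs_close_stream(stream, scratch, dummyHandler, nullptr);
    hs_free_scratch(scratch);
    hs_free_database(db);
    return 0;
}

Returning non-zero from a match callback halts matching for that stream write (hs_scan_stream reports HS_SCAN_TERMINATED), which is why the sketch's handler returns 0.
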
struct HugeScanMatchingData {
    hs_error_t err;
    const size_t datalen = 1024*1024;
-   char * data = new char[datalen];
-   memset(data, 'X', datalen);
+   vector<char> data(datalen, 'X');
    // build a database
    hs_database_t *db = nullptr;
    // streaming mode scan of our megabyte of data gb*1024 times
    unsigned long remaining = gb * 1024;
    while (remaining-- > 0) {
-       err = hs_scan_stream(stream, data, datalen, 0, scratch,
+       err = hs_scan_stream(stream, data.data(), data.size(), 0, scratch,
                             singleHandler, nullptr);
        ASSERT_EQ(HS_SUCCESS, err);
        ASSERT_EQ(0ULL, lastMatchTo);
    // teardown
    hs_free_scratch(scratch);
    hs_free_database(db);
-   delete[] data;
}
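
This hunk refers to singleHandler and lastMatchTo, whose definitions fall outside the diff. Assuming they follow Hyperscan's standard match_event_handler shape, a plausible reconstruction is the sketch below; read this way, the ASSERT_EQ(0ULL, lastMatchTo) inside the loop checks that no match has been reported while the 'X' filler blocks are streamed.

// Not shown in the diff: assumed definitions of the match bookkeeping.
static unsigned long long lastMatchTo = 0;

// Hyperscan match callback (match_event_handler): record the end offset of
// the most recent match. Returning 0 asks Hyperscan to keep scanning.
static int singleHandler(unsigned int id, unsigned long long from,
                         unsigned long long to, unsigned int flags,
                         void *context) {
    (void)id;
    (void)from;
    (void)flags;
    (void)context;
    lastMatchTo = to;
    return 0;
}
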
// Helper function to actually perform scans for BlockMatch test below