}
}
if (stream->is_end_stream_on_data_flush(source_id))
+ {
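+        // Stream ends on this flush; undo the concurrent file count for streams that carried data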
+ if (data_length > 0)
+ session_data->concurrent_files -= 1;
stream->set_state(source_id, STREAM_COMPLETE);
+ }
break;
case STREAM_BODY:
if (stream->is_end_stream_on_data_flush(source_id))
static const int DATA_SECTION_SIZE = 16384;
static const int FRAME_HEADER_LENGTH = 9;
static const uint32_t NO_STREAM_ID = 0xFFFFFFFF;
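+// Per-flow cap on concurrent streams; exceeding it raises EVENT_TOO_MANY_STREAMS and aborts inspection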
+static const uint32_t CONCURRENT_STREAMS_LIMIT = 100;
static const uint32_t HTTP2_GID = 121;
// Peg counts
// This enum must remain synchronized with Http2Module::peg_names[] in http2_tables.cc
enum PEG_COUNT { PEG_FLOW = 0, PEG_CONCURRENT_SESSIONS, PEG_MAX_CONCURRENT_SESSIONS,
- PEG_MAX_TABLE_ENTRIES, PEG_MAX_CONCURRENT_FILES, PEG_TOTAL_BYTES,
- PEG_COUNT__MAX };
+ PEG_MAX_TABLE_ENTRIES, PEG_MAX_CONCURRENT_FILES, PEG_TOTAL_BYTES, PEG_MAX_CONCURRENT_STREAMS,
+ PEG_FLOWS_OVER_STREAM_LIMIT, PEG_COUNT__MAX };
enum EventSid
{
EVENT_INVALID_PUSH_FRAME = 24,
EVENT_BAD_PUSH_SEQUENCE = 25,
EVENT_BAD_SETTINGS_VALUE = 26,
+ EVENT_TOO_MANY_STREAMS = 27,
EVENT__MAX_VALUE
};
INF_TRUNCATED_HEADER_LINE = 39,
INF_REQUEST_WITHOUT_METHOD = 40,
INF_CONNECT_WITHOUT_AUTHORITY = 41,
+ INF_TOO_MANY_STREAMS = 42,
INF__MAX_VALUE
};
Http2HpackDecoder hpack_decoder[2];
std::list<class StreamInfo> streams;
uint32_t concurrent_files = 0;
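+    // Streams currently counted against CONCURRENT_STREAMS_LIMIT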
+ uint32_t concurrent_streams = 0;
// Internal to scan()
bool preface[2] = { true, false };
#include "http2_enum.h"
#include "http2_flow_data.h"
#include "http2_hpack.h"
+#include "http2_module.h"
#include "http2_request_line.h"
#include "http2_start_line.h"
#include "http2_status_line.h"
using namespace HttpCommon;
using namespace Http2Enums;
-
Http2HeadersFrameWithStartline::~Http2HeadersFrameWithStartline()
{
delete start_line_generator;
if (session_data->abort_flow[source_id])
return false;
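+    // First inspection of this stream (no http_inspect flow data yet): enforce the concurrent stream limit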
+ if (!stream->get_hi_flow_data())
+ {
+ if (session_data->concurrent_streams < CONCURRENT_STREAMS_LIMIT)
+ {
+ session_data->concurrent_streams += 1;
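+            // Update the max_concurrent_streams peg when this flow sets a new high-water mark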
+ if (session_data->concurrent_streams >
+ Http2Module::get_peg_counts(PEG_MAX_CONCURRENT_STREAMS))
+ {
+ Http2Module::increment_peg_counts(PEG_MAX_CONCURRENT_STREAMS);
+ }
+ }
+ else
+ {
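+        // Limit exceeded: flag the infraction, raise the event, and abort both directions of the flow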
+ *session_data->infractions[source_id] += INF_TOO_MANY_STREAMS;
+ session_data->events[source_id]->create_event(EVENT_TOO_MANY_STREAMS);
+ Http2Module::increment_peg_counts(PEG_FLOWS_OVER_STREAM_LIMIT);
+ session_data->abort_flow[SRC_CLIENT] = true;
+ session_data->abort_flow[SRC_SERVER] = true;
+ stream->set_state(SRC_CLIENT, STREAM_ERROR);
+ stream->set_state(SRC_SERVER, STREAM_ERROR);
+ return false;
+ }
+ }
+
// http_inspect scan() of start line
{
uint32_t flush_offset;
if (stream->get_state(SRC_CLIENT) == STREAM_EXPECT_HEADERS)
stream->set_state(SRC_CLIENT, STREAM_COMPLETE);
- assert(stream->get_state(SRC_SERVER) == STREAM_EXPECT_HEADERS);
+ assert(stream->get_state(SRC_SERVER) == STREAM_EXPECT_HEADERS or
+ stream->get_state(SRC_SERVER) == STREAM_ERROR);
assert((stream->get_state(SRC_CLIENT) == STREAM_COMPLETE) or
(stream->get_state(SRC_CLIENT) == STREAM_ERROR));
}
session_data->deallocate_hi_memory(hi_flow_data);
delete hi_flow_data;
hi_flow_data = nullptr;
+
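+    // The stream's http_inspect flow data is gone; release its slot against the concurrent stream limit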
+ assert(session_data->concurrent_streams > 0);
+ session_data->concurrent_streams -= 1;
}
}
{ EVENT_INVALID_PUSH_FRAME, "invalid HTTP/2 push promise frame" },
{ EVENT_BAD_PUSH_SEQUENCE, "HTTP/2 push promise frame sent at invalid time" },
{ EVENT_BAD_SETTINGS_VALUE, "invalid parameter value sent in HTTP/2 settings frame" },
+ { EVENT_TOO_MANY_STREAMS, "excessive concurrent HTTP/2 streams" },
{ 0, nullptr }
};
{ CountType::MAX, "max_concurrent_files", "maximum concurrent file transfers per HTTP/2 "
"connection" },
{ CountType::SUM, "total_bytes", "total HTTP/2 data bytes inspected" },
+ { CountType::MAX, "max_concurrent_streams", "maximum concurrent streams per HTTP/2 "
+ "connection" },
+ { CountType::SUM, "flows_over_stream_limit", "HTTP/2 flows exceeding 100 concurrent streams" },
{ CountType::END, nullptr, nullptr }
};