From: Amos Jeffries
Date: Thu, 22 Jan 2015 12:53:11 +0000 (-0800)
Subject: Fix Tokenizer::prefix when fetching length limited tokens
X-Git-Tag: merge-candidate-3-v1~270^2~9
X-Git-Url: http://git.ipfire.org/?a=commitdiff_plain;h=476549749dcb529fcda7221bc88ae90b7edb4197;p=thirdparty%2Fsquid.git

Fix Tokenizer::prefix when fetching length limited tokens
---

diff --git a/src/parser/Tokenizer.cc b/src/parser/Tokenizer.cc
index 3d43c08b80..c14937f678 100644
--- a/src/parser/Tokenizer.cc
+++ b/src/parser/Tokenizer.cc
@@ -71,11 +71,13 @@ Parser::Tokenizer::token(SBuf &returnedToken, const CharacterSet &delimiters)
 bool
 Parser::Tokenizer::prefix(SBuf &returnedToken, const CharacterSet &tokenChars, const SBuf::size_type limit)
 {
-    const SBuf::size_type prefixLen = buf_.substr(0,limit).findFirstNotOf(tokenChars);
+    SBuf::size_type prefixLen = buf_.substr(0,limit).findFirstNotOf(tokenChars);
     if (prefixLen == 0)
         return false;
     if (prefixLen == SBuf::npos && (atEnd() || limit == 0))
         return false;
+    if (prefixLen == SBuf::npos && limit > 0)
+        prefixLen = limit;
     returnedToken = consume(prefixLen); // cannot be empty after the npos check
     return true;
 }
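
For context, a minimal usage sketch of the case this change addresses. The buffer contents, the CharacterSet::ALPHA set, and the helper function name are illustrative assumptions, not part of the commit; only the prefix() signature shown in the diff is taken from the source.

    // Illustrative only: every byte within the 3-byte limit is a token
    // character and more data follows, so findFirstNotOf() on the limited
    // substring returns SBuf::npos. With this fix, prefixLen is clamped to
    // the limit and exactly "abc" is consumed instead of overrunning it.
    void exampleLimitedPrefix()
    {
        Parser::Tokenizer tok(SBuf("abcdef"));
        SBuf token;
        if (tok.prefix(token, CharacterSet::ALPHA, 3)) {
            // token now holds "abc"; the remaining buffer starts at "def"
        }
    }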