From: Amos Jeffries
Date: Fri, 23 Jan 2015 10:11:15 +0000 (-0800)
Subject: Fix ::Parser::Tokenizer::prefix() limited token results
X-Git-Tag: merge-candidate-3-v1~320
X-Git-Url: http://git.ipfire.org/?a=commitdiff_plain;h=38c45a4efaafe25b4346fdfa61d0e85c3e9c00a0;p=thirdparty%2Fsquid.git

Fix ::Parser::Tokenizer::prefix() limited token results

When the prefix() method is passed a limit on the number of characters to
scan and the matched characters reach that limit, the entire Tokenizer
buffer content is consumed and returned. Correct operation is to consume
and return only the matched characters.
---

diff --git a/src/parser/Tokenizer.cc b/src/parser/Tokenizer.cc
index 3d43c08b80..b9b9e26872 100644
--- a/src/parser/Tokenizer.cc
+++ b/src/parser/Tokenizer.cc
@@ -76,6 +76,8 @@ Parser::Tokenizer::prefix(SBuf &returnedToken, const CharacterSet &tokenChars, c
         return false;
     if (prefixLen == SBuf::npos && (atEnd() || limit == 0))
         return false;
+    if (prefixLen == SBuf::npos && limit > 0)
+        prefixLen = limit;
     returnedToken = consume(prefixLen); // cannot be empty after the npos check
     return true;
 }
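
For illustration only, here is a minimal standalone sketch (not Squid code) of
the behaviour the patch fixes. It mimics the npos-vs-limit handling with
std::string in place of SBuf and a plain function in place of the Tokenizer
class; the names prefixSketch and its parameters are assumptions made for this
example, not part of the Squid API.

    // Sketch: scan at most `limit` characters of `buf` for characters in
    // `tokenChars`; on success, copy the matched prefix into `returnedToken`
    // and consume it from `buf`.
    #include <cassert>
    #include <iostream>
    #include <string>

    static bool
    prefixSketch(std::string &buf, std::string &returnedToken,
                 const std::string &tokenChars, std::string::size_type limit)
    {
        // Analogue of buf_.substr(0,limit).findFirstNotOf(tokenChars)
        std::string::size_type prefixLen =
            buf.substr(0, limit).find_first_not_of(tokenChars);

        if (prefixLen == 0)
            return false;                  // no matching prefix at all
        if (prefixLen == std::string::npos && (buf.empty() || limit == 0))
            return false;                  // nothing to consume
        if (prefixLen == std::string::npos && limit > 0)
            prefixLen = limit;             // the fix: clamp to the scan limit

        returnedToken = buf.substr(0, prefixLen);
        buf.erase(0, prefixLen);           // "consume" only the matched prefix
        return true;
    }

    int main()
    {
        std::string buf = "abcdef123";
        std::string token;

        // Every scanned character matches, so find_first_not_of() returns
        // npos. Without the clamp above, the whole buffer ("abcdef123")
        // would be consumed; with it, only the first 4 characters are.
        assert(prefixSketch(buf, token, "abcdefghijklmnopqrstuvwxyz", 4));
        std::cout << "token=" << token << " remaining=" << buf << "\n";
        // prints: token=abcd remaining=ef123
        return 0;
    }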