From 38c45a4efaafe25b4346fdfa61d0e85c3e9c00a0 Mon Sep 17 00:00:00 2001
From: Amos Jeffries
Date: Fri, 23 Jan 2015 02:11:15 -0800
Subject: [PATCH] Fix ::Parser::Tokenizer::prefix() limited token results

When the prefix() method is passed a limit on the number of characters to
scan and the matched characters reach that limit, the entire Tokenizer
buffer content is consumed and returned. Correct operation is to consume
and return only the matched characters.
---
 src/parser/Tokenizer.cc | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/src/parser/Tokenizer.cc b/src/parser/Tokenizer.cc
index 3d43c08b80..b9b9e26872 100644
--- a/src/parser/Tokenizer.cc
+++ b/src/parser/Tokenizer.cc
@@ -76,6 +76,8 @@ Parser::Tokenizer::prefix(SBuf &returnedToken, const CharacterSet &tokenChars, c
         return false;
     if (prefixLen == SBuf::npos && (atEnd() || limit == 0))
         return false;
+    if (prefixLen == SBuf::npos && limit > 0)
+        prefixLen = limit;
     returnedToken = consume(prefixLen); // cannot be empty after the npos check
     return true;
 }
--
2.47.3
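
Illustration (not part of the patch): a minimal, self-contained sketch of the
intended limit behaviour, using std::string and a hard-coded digit token set
in place of Squid's SBuf and CharacterSet. The function name limitedPrefix and
the digit token set are hypothetical; the marked line mirrors the patched
logic, which clamps the prefix length to the limit instead of consuming the
whole buffer.

#include <cassert>
#include <string>

// Consume from 'buf' the longest run of token characters (digits here),
// scanning at most 'limit' characters. Returns false when the buffer does
// not start with a token character or there is nothing to scan.
static bool
limitedPrefix(std::string &buf, std::string &returnedToken,
              std::string::size_type limit)
{
    const std::string window = buf.substr(0, limit);
    // position of the first non-token character inside the limited window
    std::string::size_type prefixLen = window.find_first_not_of("0123456789");
    if (prefixLen == 0)
        return false;                  // no token character at the start
    if (prefixLen == std::string::npos) {
        if (window.empty())
            return false;              // empty buffer or zero limit
        prefixLen = window.size();     // the fix: stop at the limit, not the whole buffer
    }
    returnedToken = buf.substr(0, prefixLen);
    buf.erase(0, prefixLen);           // consume only the matched characters
    return true;
}

int main()
{
    std::string buf = "123456789";
    std::string token;
    assert(limitedPrefix(buf, token, 4));
    assert(token == "1234");           // only 4 characters returned...
    assert(buf == "56789");            // ...and only 4 consumed; the rest remains
    return 0;
}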