more tokenizer coverage improvements
author     Bob Halley <halley@dnspython.org>
Fri, 24 Jul 2020 02:20:33 +0000 (19:20 -0700)
committer  Bob Halley <halley@dnspython.org>
Fri, 24 Jul 2020 02:20:33 +0000 (19:20 -0700)
dns/tokenizer.py
tests/test_tokenizer.py

diff --git a/dns/tokenizer.py b/dns/tokenizer.py
index 0c117abd5617453ad555176f8a534f7341d1cf96..2a13e0f2edaf49dfddc6df206270eedee16dc6e1 100644
--- a/dns/tokenizer.py
+++ b/dns/tokenizer.py
@@ -105,7 +105,7 @@ class Token:
             c = self.value[i]
             i += 1
             if c == '\\':
-                if i >= l:
+                if i >= l:  # pragma: no cover   (can't happen via get())
                     raise dns.exception.UnexpectedEnd
                 c = self.value[i]
                 i += 1
@@ -156,7 +156,7 @@ class Token:
             c = self.value[i]
             i += 1
             if c == '\\':
-                if i >= l:
+                if i >= l:  # pragma: no cover   (can't happen via get())
                     raise dns.exception.UnexpectedEnd
                 c = self.value[i]
                 i += 1
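
The pragma annotations mark the dangling-escape guard as unreachable in practice: get() raises while scanning input that ends in a bare backslash, so no Token with a trailing '\' ever reaches the unescape methods. A minimal sketch (illustrative only, not part of the commit) of that early rejection:

    import dns.exception
    import dns.tokenizer

    # A quoted string ending in a bare backslash never becomes a Token:
    # get() raises dns.exception.SyntaxError (UnexpectedEnd is a
    # subclass) while scanning, so unescape() never sees a trailing '\'
    # and the guarded "i >= l" branch stays unreachable.
    tok = dns.tokenizer.Tokenizer(r'"\"')
    try:
        tok.get().unescape()
    except dns.exception.SyntaxError:
        print('rejected during get(), before unescape() ran')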
diff --git a/tests/test_tokenizer.py b/tests/test_tokenizer.py
index 2abeaaecb0ea2346fdc254db6c89a33d4a2ed5cf..8340f46250eb14dfbbbfdd96750c7a03d7666827 100644
--- a/tests/test_tokenizer.py
+++ b/tests/test_tokenizer.py
@@ -288,6 +288,11 @@ class TokenizerTestCase(unittest.TestCase):
             tok = dns.tokenizer.Tokenizer('"not an identifier"')
             tok.get_ttl()
 
+    def testBadGetEOL(self):
+        with self.assertRaises(dns.exception.SyntaxError):
+            tok = dns.tokenizer.Tokenizer('"not an identifier"')
+            tok.get_eol_as_token()
+
     def testDanglingEscapes(self):
         for text in ['"\\"', '"\\0"', '"\\00"', '"\\00a"']:
             with self.assertRaises(dns.exception.SyntaxError):
@@ -297,5 +302,50 @@ class TokenizerTestCase(unittest.TestCase):
                 tok = dns.tokenizer.Tokenizer(text)
                 tok.get().unescape_to_bytes()
 
+    def testTokenMisc(self):
+        t1 = dns.tokenizer.Token(dns.tokenizer.IDENTIFIER, 'hi')
+        t2 = dns.tokenizer.Token(dns.tokenizer.IDENTIFIER, 'hi')
+        t3 = dns.tokenizer.Token(dns.tokenizer.IDENTIFIER, 'there')
+        self.assertEqual(t1, t2)
+        self.assertFalse(t1 == 'hi')  # assertFalse with ==, not assertNotEqual, so __eq__ is exercised
+        self.assertNotEqual(t1, 'hi')
+        self.assertNotEqual(t1, t3)
+        self.assertEqual(str(t1), '3 "hi"')
+
+    def testBadConcatenateRemaining(self):
+        with self.assertRaises(dns.exception.SyntaxError):
+            tok = dns.tokenizer.Tokenizer('a b "not an identifier" c')
+            tok.concatenate_remaining_identifiers()
+
+    def testStdinFilename(self):
+        tok = dns.tokenizer.Tokenizer()
+        self.assertEqual(tok.filename, '<stdin>')
+
+    def testBytesLiteral(self):
+        tok = dns.tokenizer.Tokenizer(b'this is input')
+        self.assertEqual(tok.get().value, 'this')
+        self.assertEqual(tok.filename, '<string>')
+        tok = dns.tokenizer.Tokenizer(b'this is input', 'myfilename')
+        self.assertEqual(tok.filename, 'myfilename')
+
+    def testUngetBranches(self):
+        tok = dns.tokenizer.Tokenizer(b'    this is input')
+        t = tok.get(want_leading=True)
+        tok.unget(t)
+        t = tok.get(want_leading=True)
+        self.assertEqual(t.ttype, dns.tokenizer.WHITESPACE)
+        tok.unget(t)
+        t = tok.get()
+        self.assertEqual(t.ttype, dns.tokenizer.IDENTIFIER)
+        self.assertEqual(t.value, 'this')
+        tok = dns.tokenizer.Tokenizer(b';    this is input\n')
+        t = tok.get(want_comment=True)
+        tok.unget(t)
+        t = tok.get(want_comment=True)
+        self.assertEqual(t.ttype, dns.tokenizer.COMMENT)
+        tok.unget(t)
+        t = tok.get()
+        self.assertEqual(t.ttype, dns.tokenizer.EOL)
+
 if __name__ == '__main__':
     unittest.main()
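
For contrast with the failure paths the new tests exercise, a happy-path sketch of the same tokenizer calls (illustrative only, not part of the commit):

    import dns.tokenizer

    # concatenate_remaining_identifiers() joins the identifiers left in
    # the input; testBadConcatenateRemaining covers the error path where
    # a quoted string interrupts them.
    tok = dns.tokenizer.Tokenizer('a b c')
    print(tok.concatenate_remaining_identifiers())  # -> 'abc'

    # unget() pushes one token back, which is why testUngetBranches
    # reads, ungets, and re-reads with the same want_* flags.
    tok = dns.tokenizer.Tokenizer('hello world')
    t = tok.get()
    tok.unget(t)
    print(tok.get().value)  # -> 'hello'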