git.ipfire.org Git - thirdparty/Python/cpython.git/commitdiff
gh-94808: Coverage: Test that maximum indentation level is handled (#95926)
authorMichael Droettboom <mdboom@gmail.com>
Thu, 6 Oct 2022 17:39:17 +0000 (13:39 -0400)
committerGitHub <noreply@github.com>
Thu, 6 Oct 2022 17:39:17 +0000 (10:39 -0700)
* gh-94808: Coverage: Test that maximum indentation level is handled

* Use "compile" rather than "exec"

Lib/test/test_tokenize.py

index 1272e1e9be002e4ab35fd8a9b35c2ceb98546ace..47f2c06685bcaa553902e071c86ea8a87f4c72ec 100644 (file)
@@ -3,7 +3,7 @@ from test.support import os_helper
 from tokenize import (tokenize, _tokenize, untokenize, NUMBER, NAME, OP,
                      STRING, ENDMARKER, ENCODING, tok_name, detect_encoding,
                      open as tokenize_open, Untokenizer, generate_tokens,
-                     NEWLINE, _generate_tokens_from_c_tokenizer)
+                     NEWLINE, _generate_tokens_from_c_tokenizer, DEDENT)
 from io import BytesIO, StringIO
 import unittest
 from textwrap import dedent
@@ -2512,6 +2512,26 @@ async def f():
         self.assertRaises(SyntaxError, get_tokens, "("*1000+"a"+")"*1000)
         self.assertRaises(SyntaxError, get_tokens, "]")
 
+    def test_max_indent(self):
+        MAXINDENT = 100
+
+        def generate_source(indents):
+            source = ''.join(('  ' * x) + 'if True:\n' for x in range(indents))
+            source += '  ' * indents + 'pass\n'
+            return source
+
+        valid = generate_source(MAXINDENT - 1)
+        tokens = list(_generate_tokens_from_c_tokenizer(valid))
+        self.assertEqual(tokens[-1].type, DEDENT)
+        compile(valid, "<string>", "exec")
+
+        invalid = generate_source(MAXINDENT)
+        tokens = list(_generate_tokens_from_c_tokenizer(invalid))
+        self.assertEqual(tokens[-1].type, NEWLINE)
+        self.assertRaises(
+            IndentationError, compile, invalid, "<string>", "exec"
+        )
+
     def test_continuation_lines_indentation(self):
         def get_tokens(string):
             return [(kind, string) for (kind, string, *_) in _generate_tokens_from_c_tokenizer(string)]