git.ipfire.org Git - thirdparty/Python/cpython.git/commitdiff
[3.12] gh-111031: Check more files in `test_tokenize` (GH-111032) (#111061)
authorMiss Islington (bot) <31488909+miss-islington@users.noreply.github.com>
Thu, 19 Oct 2023 09:18:22 +0000 (11:18 +0200)
committerGitHub <noreply@github.com>
Thu, 19 Oct 2023 09:18:22 +0000 (09:18 +0000)
gh-111031: Check more files in `test_tokenize` (GH-111032)
(cherry picked from commit e9b5399bee7106beeeb38a45cfef3f0ed3fdd703)

Co-authored-by: Nikita Sobolev <mail@sobolevn.me>
Lib/test/test_tokenize.py

index 57fc149b6a4fa8d9e66316032e570238b57a1a91..bbbc337b1883a9decce532b831900ef2b52d45f8 100644 (file)
@@ -1898,19 +1898,9 @@ class TestRoundtrip(TestCase):
         tempdir = os.path.dirname(__file__) or os.curdir
         testfiles = glob.glob(os.path.join(glob.escape(tempdir), "test*.py"))
 
-        # Tokenize is broken on test_pep3131.py because regular expressions are
-        # broken on the obscure unicode identifiers in it. *sigh*
-        # With roundtrip extended to test the 5-tuple mode of untokenize,
-        # 7 more testfiles fail.  Remove them also until the failure is diagnosed.
-
-        testfiles.remove(os.path.join(tempdir, "test_unicode_identifiers.py"))
-
         # TODO: Remove this once we can untokenize PEP 701 syntax
         testfiles.remove(os.path.join(tempdir, "test_fstring.py"))
 
-        for f in ('buffer', 'builtin', 'fileio', 'os', 'platform', 'sys'):
-            testfiles.remove(os.path.join(tempdir, "test_%s.py") % f)
-
         if not support.is_resource_enabled("cpu"):
             testfiles = random.sample(testfiles, 10)