From: Miss Islington (bot) <31488909+miss-islington@users.noreply.github.com>
Date: Thu, 19 Oct 2023 09:18:22 +0000 (+0200)
Subject: [3.12] gh-111031: Check more files in `test_tokenize` (GH-111032) (#111061)
X-Git-Tag: v3.12.1~264
X-Git-Url: http://git.ipfire.org/?a=commitdiff_plain;h=40db34c901265280a637838af0dde310f89472a0;p=thirdparty%2FPython%2Fcpython.git

[3.12] gh-111031: Check more files in `test_tokenize` (GH-111032) (#111061)

gh-111031: Check more files in `test_tokenize` (GH-111032)
(cherry picked from commit e9b5399bee7106beeeb38a45cfef3f0ed3fdd703)

Co-authored-by: Nikita Sobolev <mail@sobolevn.me>
---

diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py
index 57fc149b6a4f..bbbc337b1883 100644
--- a/Lib/test/test_tokenize.py
+++ b/Lib/test/test_tokenize.py
@@ -1898,19 +1898,9 @@ class TestRoundtrip(TestCase):
         tempdir = os.path.dirname(__file__) or os.curdir
         testfiles = glob.glob(os.path.join(glob.escape(tempdir), "test*.py"))
 
-        # Tokenize is broken on test_pep3131.py because regular expressions are
-        # broken on the obscure unicode identifiers in it. *sigh*
-        # With roundtrip extended to test the 5-tuple mode of untokenize,
-        # 7 more testfiles fail. Remove them also until the failure is diagnosed.
-
-        testfiles.remove(os.path.join(tempdir, "test_unicode_identifiers.py"))
-
         # TODO: Remove this once we can untokenize PEP 701 syntax
         testfiles.remove(os.path.join(tempdir, "test_fstring.py"))
 
-        for f in ('buffer', 'builtin', 'fileio', 'os', 'platform', 'sys'):
-            testfiles.remove(os.path.join(tempdir, "test_%s.py") % f)
-
         if not support.is_resource_enabled("cpu"):
             testfiles = random.sample(testfiles, 10)