From: Windson yang Date: Sat, 25 Jan 2020 19:23:00 +0000 (+0800) Subject: bpo-36654: Add examples for using tokenize module programmatically (#12947) X-Git-Tag: v3.9.0a4~217 X-Git-Url: http://git.ipfire.org/gitweb.cgi?a=commitdiff_plain;h=4b09dc79f4d08d85f2cc945563e9c8ef1e531d7b;p=thirdparty%2FPython%2Fcpython.git bpo-36654: Add examples for using tokenize module programmatically (#12947) --- diff --git a/Doc/library/tokenize.rst b/Doc/library/tokenize.rst index b208ba46d17d..96778f23f8f0 100644 --- a/Doc/library/tokenize.rst +++ b/Doc/library/tokenize.rst @@ -278,3 +278,22 @@ The exact token type names can be displayed using the :option:`-e` option: 4,10-4,11: RPAR ')' 4,11-4,12: NEWLINE '\n' 5,0-5,0: ENDMARKER '' + +Example of tokenizing a file programmatically, reading Unicode +strings instead of bytes with :func:`generate_tokens`:: + + import tokenize + + with tokenize.open('hello.py') as f: + tokens = tokenize.generate_tokens(f.readline) + for token in tokens: + print(token) + +Or reading bytes directly with :func:`.tokenize`:: + + import tokenize + + with open('hello.py', 'rb') as f: + tokens = tokenize.tokenize(f.readline) + for token in tokens: + print(token)