From: Raymond Hettinger
Date: Tue, 5 Nov 2002 06:08:11 +0000 (+0000)
Subject: SF 633560: tokenize.__all__ needs "generate_tokens"
X-Git-Tag: v2.2.3c1~252
X-Git-Url: http://git.ipfire.org/gitweb.cgi?a=commitdiff_plain;h=3e134291739da589e9e5a99dc85becb442bc5a78;p=thirdparty%2FPython%2Fcpython.git

SF 633560: tokenize.__all__ needs "generate_tokens"
---

diff --git a/Lib/tokenize.py b/Lib/tokenize.py
index b64be8083202..4787a8011c54 100644
--- a/Lib/tokenize.py
+++ b/Lib/tokenize.py
@@ -32,7 +32,8 @@ import string, re
 from token import *
 
 import token
-__all__ = [x for x in dir(token) if x[0] != '_'] + ["COMMENT", "tokenize", "NL"]
+__all__ = [x for x in dir(token) if x[0] != '_'] + ["COMMENT", "tokenize",
+           "generate_tokens", "NL"]
 del token
 
 COMMENT = N_TOKENS
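
For context, __all__ is the list that "from tokenize import *" consults, so before this change a star import did not bind generate_tokens even though the module defined and documented it. The sketch below (not part of the commit) shows the post-fix behavior; it is written against the modern stdlib, where generate_tokens and tok_name are still exported and io.StringIO stands in for the 2.2-era StringIO module:

    import io
    from tokenize import *   # __all__ now includes generate_tokens

    # generate_tokens() takes a readline callable and yields 5-tuples:
    # (token type, token string, (srow, scol), (erow, ecol), physical line).
    readline = io.StringIO("x = 1 + 2\n").readline
    for tok_type, tok_string, start, end, line in generate_tokens(readline):
        print(tok_name[tok_type], repr(tok_string))

For this input the loop prints NAME 'x', OP '=', NUMBER '1', OP '+', NUMBER '2', a NEWLINE, and a final ENDMARKER.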