From 3e134291739da589e9e5a99dc85becb442bc5a78 Mon Sep 17 00:00:00 2001
From: Raymond Hettinger
Date: Tue, 5 Nov 2002 06:08:11 +0000
Subject: [PATCH] SF 633560: tokenize.__all__ needs "generate_tokens"

---
 Lib/tokenize.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/Lib/tokenize.py b/Lib/tokenize.py
index b64be8083202..4787a8011c54 100644
--- a/Lib/tokenize.py
+++ b/Lib/tokenize.py
@@ -32,7 +32,8 @@ import string, re
 from token import *
 
 import token
-__all__ = [x for x in dir(token) if x[0] != '_'] + ["COMMENT", "tokenize", "NL"]
+__all__ = [x for x in dir(token) if x[0] != '_'] + ["COMMENT", "tokenize",
+           "generate_tokens", "NL"]
 del token
 
 COMMENT = N_TOKENS
-- 
2.47.3
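
For context, a minimal usage sketch (not part of the commit itself): once "generate_tokens" is listed in __all__, a star-import of tokenize exposes the generator API directly. The snippet below assumes a modern Python 3 interpreter; every name used is the stdlib's own.

    from tokenize import *       # "generate_tokens" is now brought in by the star-import
    from io import StringIO

    source = "x = 1 + 2\n"
    # generate_tokens() takes a readline callable and yields one token tuple at a time.
    for tok in generate_tokens(StringIO(source).readline):
        print(tok)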