git.ipfire.org Git - thirdparty/Python/cpython.git/commitdiff
gh-92651: Remove the Include/token.h header file (#92652)
author: Victor Stinner <vstinner@python.org>
Wed, 11 May 2022 21:22:50 +0000 (23:22 +0200)
committer: GitHub <noreply@github.com>
Wed, 11 May 2022 21:22:50 +0000 (23:22 +0200)
Remove the token.h header file. There was never any public tokenizer
C API. The token.h header file was only designed to be used by Python
internals.

Move Include/token.h to Include/internal/pycore_token.h. Including
this header file now requires that the Py_BUILD_CORE macro is
defined. It no longer checks for the Py_LIMITED_API macro.

Rename functions:

* PyToken_OneChar() => _PyToken_OneChar()
* PyToken_TwoChars() => _PyToken_TwoChars()
* PyToken_ThreeChars() => _PyToken_ThreeChars()

13 files changed:
Doc/whatsnew/3.12.rst
Include/internal/pycore_token.h [moved from Include/token.h with 87% similarity]
Makefile.pre.in
Misc/NEWS.d/next/C API/2022-05-11-02-33-10.gh-issue-92651.FIXLf0.rst [new file with mode: 0644]
PCbuild/pythoncore.vcxproj
PCbuild/pythoncore.vcxproj.filters
PCbuild/regen.targets
Parser/pegen.h
Parser/token.c
Parser/tokenizer.c
Parser/tokenizer.h
Python/pythonrun.c
Tools/scripts/generate_token.py

index 461d9db793400da71434d593c5349ec704064207..bc354c36cdaea4179aed923f9b4f2bc8bea8f43f 100644 (file)
@@ -139,3 +139,8 @@ Deprecated
 
 Removed
 -------
+
+* Remove the ``token.h`` header file. There was never any public tokenizer C
+  API. The ``token.h`` header file was only designed to be used by Python
+  internals.
+  (Contributed by Victor Stinner in :gh:`92651`.)
similarity index 87%
rename from Include/token.h
rename to Include/internal/pycore_token.h
index eb1b9ea47b469aff22032524129060a04659d28f..f9b8240e2168f26e0d596cf923dbb22282bbc0bb 100644 (file)
@@ -1,13 +1,16 @@
 /* Auto-generated by Tools/scripts/generate_token.py */
 
 /* Token types */
-#ifndef Py_LIMITED_API
-#ifndef Py_TOKEN_H
-#define Py_TOKEN_H
+#ifndef Py_INTERNAL_TOKEN_H
+#define Py_INTERNAL_TOKEN_H
 #ifdef __cplusplus
 extern "C" {
 #endif
 
+#ifndef Py_BUILD_CORE
+#  error "this header requires Py_BUILD_CORE define"
+#endif
+
 #undef TILDE   /* Prevent clash of our definition with system macro. Ex AIX, ioctl.h */
 
 #define ENDMARKER       0
@@ -85,13 +88,13 @@ extern "C" {
                                  (x) == DEDENT)
 
 
+// Symbols exported for test_peg_generator
 PyAPI_DATA(const char * const) _PyParser_TokenNames[]; /* Token names */
-PyAPI_FUNC(int) PyToken_OneChar(int);
-PyAPI_FUNC(int) PyToken_TwoChars(int, int);
-PyAPI_FUNC(int) PyToken_ThreeChars(int, int, int);
+PyAPI_FUNC(int) _PyToken_OneChar(int);
+PyAPI_FUNC(int) _PyToken_TwoChars(int, int);
+PyAPI_FUNC(int) _PyToken_ThreeChars(int, int, int);
 
 #ifdef __cplusplus
 }
 #endif
-#endif /* !Py_TOKEN_H */
-#endif /* Py_LIMITED_API */
+#endif  // !Py_INTERNAL_TOKEN_H
index e45d4fe3ecb6ea5350cc7d07e8a25a9c2fdea920..869c78ee0d3c5019a1df47de2170e8e58c50e730 100644 (file)
@@ -1325,11 +1325,11 @@ regen-token:
        $(PYTHON_FOR_REGEN) $(srcdir)/Tools/scripts/generate_token.py rst \
                $(srcdir)/Grammar/Tokens \
                $(srcdir)/Doc/library/token-list.inc
-       # Regenerate Include/token.h from Grammar/Tokens
+       # Regenerate Include/internal/pycore_token.h from Grammar/Tokens
        # using Tools/scripts/generate_token.py
        $(PYTHON_FOR_REGEN) $(srcdir)/Tools/scripts/generate_token.py h \
                $(srcdir)/Grammar/Tokens \
-               $(srcdir)/Include/token.h
+               $(srcdir)/Include/internal/pycore_token.h
        # Regenerate Parser/token.c from Grammar/Tokens
        # using Tools/scripts/generate_token.py
        $(PYTHON_FOR_REGEN) $(srcdir)/Tools/scripts/generate_token.py c \
@@ -1521,7 +1521,6 @@ PYTHON_HEADERS= \
                $(srcdir)/Include/structmember.h \
                $(srcdir)/Include/structseq.h \
                $(srcdir)/Include/sysmodule.h \
-               $(srcdir)/Include/token.h \
                $(srcdir)/Include/traceback.h \
                $(srcdir)/Include/tracemalloc.h \
                $(srcdir)/Include/tupleobject.h \
@@ -1632,6 +1631,7 @@ PYTHON_HEADERS= \
                $(srcdir)/Include/internal/pycore_structseq.h \
                $(srcdir)/Include/internal/pycore_symtable.h \
                $(srcdir)/Include/internal/pycore_sysmodule.h \
+               $(srcdir)/Include/internal/pycore_token.h \
                $(srcdir)/Include/internal/pycore_traceback.h \
                $(srcdir)/Include/internal/pycore_tuple.h \
                $(srcdir)/Include/internal/pycore_typeobject.h \
diff --git a/Misc/NEWS.d/next/C API/2022-05-11-02-33-10.gh-issue-92651.FIXLf0.rst b/Misc/NEWS.d/next/C API/2022-05-11-02-33-10.gh-issue-92651.FIXLf0.rst
new file mode 100644 (file)
index 0000000..60a8818
--- /dev/null
@@ -0,0 +1,3 @@
+Remove the ``token.h`` header file. There was never any public tokenizer C
+API. The ``token.h`` header file was only designed to be used by Python
+internals. Patch by Victor Stinner.
index 3ce116d2babb08e003304da75e20c33433e7325a..a35884b3c358879b252923a794260bbcb0afe5fb 100644 (file)
     <ClInclude Include="..\Include\internal\pycore_structseq.h" />
     <ClInclude Include="..\Include\internal\pycore_sysmodule.h" />
     <ClInclude Include="..\Include\internal\pycore_symtable.h" />
+    <ClInclude Include="..\Include\internal\pycore_token.h" />
     <ClInclude Include="..\Include\internal\pycore_traceback.h" />
     <ClInclude Include="..\Include\internal\pycore_tuple.h" />
     <ClInclude Include="..\Include\internal\pycore_typeobject.h" />
     <ClInclude Include="..\Include\structseq.h" />
     <ClInclude Include="..\Include\symtable.h" />
     <ClInclude Include="..\Include\sysmodule.h" />
-    <ClInclude Include="..\Include\token.h" />
     <ClInclude Include="..\Include\traceback.h" />
     <ClInclude Include="..\Include\tracemalloc.h" />
     <ClInclude Include="..\Include\tupleobject.h" />
index 542d551045686dbfaeb7081430a12201c0b87035..ff42cc92c4bd23060c516b6174924b08b2bafc9c 100644 (file)
     <ClInclude Include="..\Include\sysmodule.h">
       <Filter>Include</Filter>
     </ClInclude>
-    <ClInclude Include="..\Include\token.h">
-      <Filter>Include</Filter>
-    </ClInclude>
     <ClInclude Include="..\Include\traceback.h">
       <Filter>Include</Filter>
     </ClInclude>
     <ClInclude Include="..\Include\internal\pycore_symtable.h">
       <Filter>Include\internal</Filter>
     </ClInclude>
+    <ClInclude Include="..\Include\internal\pycore_token.h">
+      <Filter>Include\internal</Filter>
+    </ClInclude>
     <ClInclude Include="..\Include\internal\pycore_traceback.h">
       <Filter>Include\internal</Filter>
     </ClInclude>
index 24b5ced1de0e01bc658d5e81c692c77d99a8805d..9073bb6ab2bd69d73b92aac99dc6f8e893af55d6 100644 (file)
@@ -19,7 +19,7 @@
     <_TokenOutputs Include="$(PySourcePath)Doc\library\token-list.inc">
       <Format>rst</Format>
     </_TokenOutputs>
-    <_TokenOutputs Include="$(PySourcePath)Include\token.h">
+    <_TokenOutputs Include="$(PySourcePath)Include\internal\pycore_token.h">
       <Format>h</Format>
     </_TokenOutputs>
     <_TokenOutputs Include="$(PySourcePath)Parser\token.c">
index fe0c327b8755669390014faa444142d68f8fecc0..d6a6e4e1eeb2f9723bbac6419998c9f88b9bb5a7 100644 (file)
@@ -3,8 +3,8 @@
 
 #define PY_SSIZE_T_CLEAN
 #include <Python.h>
-#include <token.h>
 #include <pycore_ast.h>
+#include <pycore_token.h>
 
 #if 0
 #define PyPARSE_YIELD_IS_KEYWORD        0x0001
index 74bca0eff657b7cb393168f385021ec5c8c0ac12..fa03fbc450b2bcc1e2601d05a518392064a25fd8 100644 (file)
@@ -1,7 +1,7 @@
 /* Auto-generated by Tools/scripts/generate_token.py */
 
 #include "Python.h"
-#include "token.h"
+#include "pycore_token.h"
 
 /* Token names */
 
@@ -76,7 +76,7 @@ const char * const _PyParser_TokenNames[] = {
 /* Return the token corresponding to a single character */
 
 int
-PyToken_OneChar(int c1)
+_PyToken_OneChar(int c1)
 {
     switch (c1) {
     case '%': return PERCENT;
@@ -107,7 +107,7 @@ PyToken_OneChar(int c1)
 }
 
 int
-PyToken_TwoChars(int c1, int c2)
+_PyToken_TwoChars(int c1, int c2)
 {
     switch (c1) {
     case '!':
@@ -191,7 +191,7 @@ PyToken_TwoChars(int c1, int c2)
 }
 
 int
-PyToken_ThreeChars(int c1, int c2, int c3)
+_PyToken_ThreeChars(int c1, int c2, int c3)
 {
     switch (c1) {
     case '*':
index c450aa8e4636ce3028eb6060316c9433bae50306..7c797180956d54cd881bd8ce2078cb454d5c6556 100644 (file)
@@ -1992,10 +1992,10 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end)
     /* Check for two-character token */
     {
         int c2 = tok_nextc(tok);
-        int token = PyToken_TwoChars(c, c2);
+        int token = _PyToken_TwoChars(c, c2);
         if (token != OP) {
             int c3 = tok_nextc(tok);
-            int token3 = PyToken_ThreeChars(c, c2, c3);
+            int token3 = _PyToken_ThreeChars(c, c2, c3);
             if (token3 != OP) {
                 token = token3;
             }
@@ -2059,7 +2059,7 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end)
     /* Punctuation character */
     *p_start = tok->start;
     *p_end = tok->cur;
-    return PyToken_OneChar(c);
+    return _PyToken_OneChar(c);
 }
 
 int
index 0cb665104b2b86f9d57c07dc811a782da7fbab88..dba71bd60fefe4a92e3bf056eee885b3ce563e5f 100644 (file)
@@ -8,7 +8,7 @@ extern "C" {
 
 /* Tokenizer interface */
 
-#include "token.h"      /* For token types */
+#include "pycore_token.h" /* For token types */
 
 #define MAXINDENT 100   /* Max indentation level */
 #define MAXLEVEL 200    /* Max parentheses level */
index f12b9f6e9539a29245dd44d7f4f16d18c3b7bdb8..202df585f31c63f2470bc972e11c860357ed203a 100644 (file)
@@ -24,7 +24,6 @@
 #include "pycore_sysmodule.h"     // _PySys_Audit()
 #include "pycore_traceback.h"     // _PyTraceBack_Print_Indented()
 
-#include "token.h"                // INDENT
 #include "errcode.h"              // E_EOF
 #include "marshal.h"              // PyMarshal_ReadLongFromFile()
 
index 77bb5bd5eca02c1dc5482eb7319559d113d38cc3..d8be8b93de1416f5095c9f08be870251a66a0f69 100755 (executable)
@@ -51,13 +51,16 @@ token_h_template = """\
 /* Auto-generated by Tools/scripts/generate_token.py */
 
 /* Token types */
-#ifndef Py_LIMITED_API
-#ifndef Py_TOKEN_H
-#define Py_TOKEN_H
+#ifndef Py_INTERNAL_TOKEN_H
+#define Py_INTERNAL_TOKEN_H
 #ifdef __cplusplus
 extern "C" {
 #endif
 
+#ifndef Py_BUILD_CORE
+#  error "this header requires Py_BUILD_CORE define"
+#endif
+
 #undef TILDE   /* Prevent clash of our definition with system macro. Ex AIX, ioctl.h */
 
 %s\
@@ -75,19 +78,19 @@ extern "C" {
                                  (x) == DEDENT)
 
 
+// Symbols exported for test_peg_generator
 PyAPI_DATA(const char * const) _PyParser_TokenNames[]; /* Token names */
-PyAPI_FUNC(int) PyToken_OneChar(int);
-PyAPI_FUNC(int) PyToken_TwoChars(int, int);
-PyAPI_FUNC(int) PyToken_ThreeChars(int, int, int);
+PyAPI_FUNC(int) _PyToken_OneChar(int);
+PyAPI_FUNC(int) _PyToken_TwoChars(int, int);
+PyAPI_FUNC(int) _PyToken_ThreeChars(int, int, int);
 
 #ifdef __cplusplus
 }
 #endif
-#endif /* !Py_TOKEN_H */
-#endif /* Py_LIMITED_API */
+#endif  // !Py_INTERNAL_TOKEN_H
 """
 
-def make_h(infile, outfile='Include/token.h'):
+def make_h(infile, outfile='Include/internal/pycore_token.h'):
     tok_names, ERRORTOKEN, string_to_tok = load_tokens(infile)
 
     defines = []
@@ -106,7 +109,7 @@ token_c_template = """\
 /* Auto-generated by Tools/scripts/generate_token.py */
 
 #include "Python.h"
-#include "token.h"
+#include "pycore_token.h"
 
 /* Token names */
 
@@ -117,21 +120,21 @@ const char * const _PyParser_TokenNames[] = {
 /* Return the token corresponding to a single character */
 
 int
-PyToken_OneChar(int c1)
+_PyToken_OneChar(int c1)
 {
 %s\
     return OP;
 }
 
 int
-PyToken_TwoChars(int c1, int c2)
+_PyToken_TwoChars(int c1, int c2)
 {
 %s\
     return OP;
 }
 
 int
-PyToken_ThreeChars(int c1, int c2, int c3)
+_PyToken_ThreeChars(int c1, int c2, int c3)
 {
 %s\
     return OP;