git.ipfire.org Git - thirdparty/Python/cpython.git/commitdiff
massive refactoring of the _PyJitTracerState struct
author    Ken Jin <28750310+Fidget-Spinner@users.noreply.github.com>
          Fri, 7 Nov 2025 20:47:52 +0000 (20:47 +0000)
committer Ken Jin <28750310+Fidget-Spinner@users.noreply.github.com>
          Fri, 7 Nov 2025 20:47:52 +0000 (20:47 +0000)
Include/internal/pycore_interp_structs.h
Include/internal/pycore_optimizer.h
Lib/test/test_ast/test_ast.py
Python/ceval.c
Python/optimizer.c
Python/optimizer_analysis.c
Python/pystate.c

Include/internal/pycore_interp_structs.h
index b0f5d0abc3fcae8934308228ea26ebad76aff723..cac5e358a673f039111c51d6058a7be2e8dff1d1 100644 (file)
@@ -758,24 +758,26 @@ struct _Py_unique_id_pool {
 typedef _Py_CODEUNIT *(*_PyJitEntryFuncPtr)(struct _PyExecutorObject *exec, _PyInterpreterFrame *frame, _PyStackRef *stack_pointer, PyThreadState *tstate);
 
 typedef struct _PyJitTracerState {
+    struct {
+        int stack_depth;
+        int chain_depth;
+        struct _PyExitData *exit;
+        PyCodeObject *code; // Strong
+        PyFunctionObject *func; // Strong
+        _Py_CODEUNIT *start_instr;
+        _Py_CODEUNIT *close_loop_instr;
+        _Py_CODEUNIT *jump_backward_instr;
+    } initial_state;
     bool dependencies_still_valid;
     bool prev_instr_is_super;
     int code_max_size;
     int code_curr_size;
-    int initial_stack_depth;
-    int initial_chain_depth;
     int prev_instr_oparg;
     int prev_instr_stacklevel;
     int specialize_counter;
     _PyUOpInstruction *code_buffer;
-    _Py_CODEUNIT *start_instr;
-    _Py_CODEUNIT *close_loop_instr;
-    _Py_CODEUNIT *jump_backward_instr;
-    PyCodeObject *initial_code; // Strong
-    PyFunctionObject *initial_func; // Strong
     _Py_CODEUNIT *prev_instr;
     PyCodeObject *prev_instr_code; // Strong
-    struct _PyExitData *prev_exit;
     _PyInterpreterFrame *prev_instr_frame;
     _PyBloomFilter dependencies;
 } _PyJitTracerState;
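
Net effect of this hunk: everything captured once, when tracing starts, now lives in a single nested initial_state block instead of scattered flat initial_* members (plus prev_exit, which becomes initial_state.exit). Below is a minimal, self-contained C sketch of the new shape and the resulting access path; the field names come from the hunk, while the stand-in types are simplifications and not the real CPython definitions.

#include <stdio.h>

/* Simplified stand-ins for the CPython types referenced by the hunk. */
typedef struct { int _unused; } PyCodeObject;
typedef struct { int _unused; } PyFunctionObject;
typedef unsigned short _Py_CODEUNIT;
struct _PyExitData;

typedef struct _PyJitTracerState {
    struct {
        int stack_depth;
        int chain_depth;
        struct _PyExitData *exit;
        PyCodeObject *code;              /* strong reference in the real code */
        PyFunctionObject *func;          /* strong reference in the real code */
        _Py_CODEUNIT *start_instr;
        _Py_CODEUNIT *close_loop_instr;
        _Py_CODEUNIT *jump_backward_instr;
    } initial_state;                     /* captured once, at trace start */
    /* ... per-instruction fields (prev_instr, dependencies, ...) unchanged ... */
} _PyJitTracerState;

int main(void)
{
    _PyJitTracerState jit_state = {0};
    jit_state.initial_state.stack_depth = 3;   /* was: jit_state.initial_stack_depth = 3; */
    printf("stack depth at trace start: %d\n", jit_state.initial_state.stack_depth);
    return 0;
}

Every call site in the hunks below changes accordingly: jit_state.initial_code becomes jit_state.initial_state.code, jit_state.prev_exit becomes jit_state.initial_state.exit, and so on.
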
Include/internal/pycore_optimizer.h
index 28c6c3ecaea122156b803db60feb65c97c4def27..eea5608621e9e72a45758aaacebe66724d78b72c 100644 (file)
@@ -91,7 +91,7 @@ PyAPI_FUNC(void) _Py_Executors_InvalidateCold(PyInterpreterState *interp);
 #define TRACE_STACK_SIZE 5
 
 int _Py_uop_analyze_and_optimize(
-    PyFunctionObject *initial_func,
+    PyFunctionObject *func,
     _PyUOpInstruction *trace, int trace_len, int curr_stackentries,
     _PyBloomFilter *dependencies);
 
Lib/test/test_ast/test_ast.py
index 551de5851daace1fd79bc55fce1c262ee1988198..78ee7971849914fdbfaa43593468e5dd77808322 100644 (file)
@@ -3047,8 +3047,8 @@ class EndPositionTests(unittest.TestCase):
 
 class NodeTransformerTests(ASTTestMixin, unittest.TestCase):
     def assertASTTransformation(self, transformer_class,
-                                initial_code, expected_code):
-        initial_ast = ast.parse(dedent(initial_code))
+                                code, expected_code):
+        initial_ast = ast.parse(dedent(code))
         expected_ast = ast.parse(dedent(expected_code))
 
         transformer = transformer_class()
Python/ceval.c
index 00c30cc3fd4bcf6fc1fd45707b43f6580c916b47..1b4ce9233c1925107a7732f55c8fe92a9de44a06 100644 (file)
@@ -1001,15 +1001,15 @@ bail_tracing_and_jit(PyThreadState *tstate, _PyInterpreterFrame *frame)
         err = _PyOptimizer_Optimize(frame, tstate);
     }
     // Deal with backoffs
-    _PyExitData *exit = tstate->interp->jit_state.prev_exit;
+    _PyExitData *exit = tstate->interp->jit_state.initial_state.exit;
     if (exit == NULL) {
         // We hold a strong reference to the code object, so the instruction won't be freed.
         if (err <= 0) {
-            _Py_BackoffCounter counter = tstate->interp->jit_state.jump_backward_instr[1].counter;
-            tstate->interp->jit_state.jump_backward_instr[1].counter = restart_backoff_counter(counter);
+            _Py_BackoffCounter counter = tstate->interp->jit_state.initial_state.jump_backward_instr[1].counter;
+            tstate->interp->jit_state.initial_state.jump_backward_instr[1].counter = restart_backoff_counter(counter);
         }
         else {
-            tstate->interp->jit_state.jump_backward_instr[1].counter = initial_jump_backoff_counter();
+            tstate->interp->jit_state.initial_state.jump_backward_instr[1].counter = initial_jump_backoff_counter();
         }
     }
     else {
Python/optimizer.c
index dbceb5aa5b6987050061d812e624094ddeced81c..90bf7069d8fe53ac973bd464c124a946923c359d 100644 (file)
@@ -119,13 +119,13 @@ _PyOptimizer_Optimize(
     _PyInterpreterFrame *frame, PyThreadState *tstate)
 {
     PyInterpreterState *interp = _PyInterpreterState_GET();
-    int chain_depth = tstate->interp->jit_state.initial_chain_depth;
+    int chain_depth = tstate->interp->jit_state.initial_state.chain_depth;
     assert(interp->jit);
     assert(!interp->compiling);
-    assert(tstate->interp->jit_state.initial_stack_depth >= 0);
+    assert(tstate->interp->jit_state.initial_state.stack_depth >= 0);
 #ifndef Py_GIL_DISABLED
     // Trace got stomped on by another thread.
-    if (tstate->interp->jit_state.initial_func == NULL) {
+    if (tstate->interp->jit_state.initial_state.func == NULL) {
         return 0;
     }
     interp->compiling = true;
@@ -135,8 +135,8 @@ _PyOptimizer_Optimize(
     // this is true, since a deopt won't infinitely re-enter the executor:
     chain_depth %= MAX_CHAIN_DEPTH;
     bool progress_needed = chain_depth == 0;
-    PyCodeObject *code = (PyCodeObject *)tstate->interp->jit_state.initial_code;
-    _Py_CODEUNIT *start = tstate->interp->jit_state.start_instr;
+    PyCodeObject *code = (PyCodeObject *)tstate->interp->jit_state.initial_state.code;
+    _Py_CODEUNIT *start = tstate->interp->jit_state.initial_state.start_instr;
     if (progress_needed && !has_space_for_executor(code, start)) {
         interp->compiling = false;
         return 0;
@@ -171,9 +171,9 @@ _PyOptimizer_Optimize(
     else {
         executor->vm_data.code = NULL;
     }
-    _PyExitData *prev_exit = tstate->interp->jit_state.prev_exit;
-    if (prev_exit != NULL) {
-        prev_exit->executor = executor;
+    _PyExitData *exit = tstate->interp->jit_state.initial_state.exit;
+    if (exit != NULL) {
+        exit->executor = executor;
     }
     executor->vm_data.chain_depth = chain_depth;
     assert(executor->vm_data.valid);
@@ -569,7 +569,7 @@ _PyJit_translate_single_bytecode_to_trace(
     if (old_code == NULL) {
         return 0;
     }
-    bool progress_needed = (tstate->interp->jit_state.initial_chain_depth % MAX_CHAIN_DEPTH) == 0;;
+    bool progress_needed = (tstate->interp->jit_state.initial_state.chain_depth % MAX_CHAIN_DEPTH) == 0;
     _PyBloomFilter *dependencies = &tstate->interp->jit_state.dependencies;
     _Py_BloomFilter_Add(dependencies, old_code);
     int trace_length = tstate->interp->jit_state.code_curr_size;
@@ -748,8 +748,8 @@ _PyJit_translate_single_bytecode_to_trace(
             _Py_FALLTHROUGH;
         case JUMP_BACKWARD_NO_INTERRUPT:
         {
-            if ((next_instr != tstate->interp->jit_state.close_loop_instr) &&
-                (next_instr != tstate->interp->jit_state.start_instr) &&
+            if ((next_instr != tstate->interp->jit_state.initial_state.close_loop_instr) &&
+                (next_instr != tstate->interp->jit_state.initial_state.start_instr) &&
                 tstate->interp->jit_state.code_curr_size > 5 &&
                 // These are coroutines, and we want to unroll those usually.
                 opcode != JUMP_BACKWARD_NO_INTERRUPT) {
@@ -760,7 +760,8 @@ _PyJit_translate_single_bytecode_to_trace(
                 OPT_STAT_INC(inner_loop);
                 ADD_TO_TRACE(_EXIT_TRACE, 0, 0, target);
                 trace[trace_length-1].operand1 = true; // is_control_flow
-                DPRINTF(2, "JUMP_BACKWARD not to top ends trace %p %p %p\n", next_instr, tstate->interp->jit_state.close_loop_instr, tstate->interp->jit_state.start_instr);
+                DPRINTF(2, "JUMP_BACKWARD not to top ends trace %p %p %p\n", next_instr,
+                    tstate->interp->jit_state.initial_state.close_loop_instr, tstate->interp->jit_state.initial_state.start_instr);
                 goto done;
             }
             break;
@@ -915,7 +916,8 @@ _PyJit_translate_single_bytecode_to_trace(
         }
     }
     // Loop back to the start
-    int is_first_instr = tstate->interp->jit_state.close_loop_instr == next_instr || tstate->interp->jit_state.start_instr == next_instr;
+    int is_first_instr = tstate->interp->jit_state.initial_state.close_loop_instr == next_instr ||
+        tstate->interp->jit_state.initial_state.start_instr == next_instr;
     if (is_first_instr && tstate->interp->jit_state.code_curr_size > 5) {
         if (needs_guard_ip) {
             ADD_TO_TRACE(_SET_IP, 0, (uintptr_t)next_instr, 0);
@@ -985,13 +987,13 @@ _PyJit_TryInitializeTracing(
     tstate->interp->jit_state.code_curr_size = 2;
 
     tstate->interp->jit_state.code_max_size = UOP_MAX_TRACE_LENGTH;
-    tstate->interp->jit_state.start_instr = start_instr;
-    tstate->interp->jit_state.close_loop_instr = close_loop_instr;
-    tstate->interp->jit_state.initial_code = (PyCodeObject *)Py_NewRef(code);
-    tstate->interp->jit_state.initial_func = (PyFunctionObject *)Py_XNewRef(PyStackRef_AsPyObjectBorrow(frame->f_funcobj));
-    tstate->interp->jit_state.prev_exit = exit;
-    tstate->interp->jit_state.initial_stack_depth = curr_stackdepth;
-    tstate->interp->jit_state.initial_chain_depth = chain_depth;
+    tstate->interp->jit_state.initial_state.start_instr = start_instr;
+    tstate->interp->jit_state.initial_state.close_loop_instr = close_loop_instr;
+    tstate->interp->jit_state.initial_state.code = (PyCodeObject *)Py_NewRef(code);
+    tstate->interp->jit_state.initial_state.func = (PyFunctionObject *)Py_XNewRef(PyStackRef_AsPyObjectBorrow(frame->f_funcobj));
+    tstate->interp->jit_state.initial_state.exit = exit;
+    tstate->interp->jit_state.initial_state.stack_depth = curr_stackdepth;
+    tstate->interp->jit_state.initial_state.chain_depth = chain_depth;
     tstate->interp->jit_state.prev_instr_frame = frame;
     tstate->interp->jit_state.dependencies_still_valid = true;
     tstate->interp->jit_state.specialize_counter = 0;
@@ -1002,7 +1004,7 @@ _PyJit_TryInitializeTracing(
     tstate->interp->jit_state.prev_instr_stacklevel = curr_stackdepth;
     tstate->interp->jit_state.prev_instr_is_super = false;
     assert(curr_instr->op.code == JUMP_BACKWARD_JIT || (exit != NULL));
-    tstate->interp->jit_state.jump_backward_instr = curr_instr;
+    tstate->interp->jit_state.initial_state.jump_backward_instr = curr_instr;
     assert(curr_instr->op.code == JUMP_BACKWARD_JIT || (exit != NULL));
     _Py_BloomFilter_Init(&tstate->interp->jit_state.dependencies);
     return 1;
@@ -1011,8 +1013,8 @@ _PyJit_TryInitializeTracing(
 void
 _PyJit_FinalizeTracing(PyThreadState *tstate)
 {
-    Py_CLEAR(tstate->interp->jit_state.initial_code);
-    Py_CLEAR(tstate->interp->jit_state.initial_func);
+    Py_CLEAR(tstate->interp->jit_state.initial_state.code);
+    Py_CLEAR(tstate->interp->jit_state.initial_state.func);
     Py_CLEAR(tstate->interp->jit_state.prev_instr_code);
     tstate->interp->jit_state.code_curr_size = 2;
     tstate->interp->jit_state.code_max_size = UOP_MAX_TRACE_LENGTH - 1;
@@ -1335,7 +1337,7 @@ uop_optimize(
     // It is the optimizer's responsibility to add the dependencies it requires on its own.
     _PyBloomFilter new_dependencies;
     _Py_BloomFilter_Init(&new_dependencies);
-    _Py_BloomFilter_Add(&new_dependencies, tstate->interp->jit_state.initial_code);
+    _Py_BloomFilter_Add(&new_dependencies, tstate->interp->jit_state.initial_state.code);
     PyInterpreterState *interp = _PyInterpreterState_GET();
     _PyUOpInstruction *buffer = interp->jit_state.code_buffer;
     OPT_STAT_INC(attempts);
@@ -1344,7 +1346,7 @@ uop_optimize(
     if (env_var == NULL || *env_var == '\0' || *env_var > '0') {
         is_noopt = false;
     }
-    int curr_stackentries = tstate->interp->jit_state.initial_stack_depth;
+    int curr_stackentries = tstate->interp->jit_state.initial_state.stack_depth;
     int length = interp->jit_state.code_curr_size;
     // Trace too short, don't bother.
     if (length <= 5) {
@@ -1354,7 +1356,7 @@ uop_optimize(
     assert(length < UOP_MAX_TRACE_LENGTH);
     OPT_STAT_INC(traces_created);
     if (!is_noopt) {
-        length = _Py_uop_analyze_and_optimize(tstate->interp->jit_state.initial_func, buffer,
+        length = _Py_uop_analyze_and_optimize(tstate->interp->jit_state.initial_state.func, buffer,
                                            length,
                                            curr_stackentries, &new_dependencies);
         if (length <= 0) {
@@ -1379,7 +1381,8 @@ uop_optimize(
     OPT_HIST(effective_trace_length(buffer, length), optimized_trace_length_hist);
     length = prepare_for_execution(buffer, length);
     assert(length <= UOP_MAX_TRACE_LENGTH);
-    _PyExecutorObject *executor = make_executor_from_uops(buffer, length, &new_dependencies, tstate->interp->jit_state.initial_chain_depth);
+    _PyExecutorObject *executor = make_executor_from_uops(
+        buffer, length, &new_dependencies, tstate->interp->jit_state.initial_state.chain_depth);
     if (executor == NULL) {
         return -1;
     }
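
A side note on the initialization in _PyJit_TryInitializeTracing above: the patch keeps one assignment per field. Because the members now sit in one nested block, they could in principle be filled with a single designated-initializer store. The sketch below is a hypothetical alternative, not what the patch does; it assumes the nested block were declared with a tag (here _PyJitTracerInitialState) rather than anonymously, and reuses the locals visible in the hunk.

/* Hypothetical alternative to the per-field assignments above. Requires the
 * nested block to be declared as
 *     struct _PyJitTracerInitialState { ... } initial_state;
 * the patch keeps it anonymous and assigns each member individually. */
tstate->interp->jit_state.initial_state = (struct _PyJitTracerInitialState){
    .stack_depth = curr_stackdepth,
    .chain_depth = chain_depth,
    .exit = exit,
    .code = (PyCodeObject *)Py_NewRef(code),
    .func = (PyFunctionObject *)Py_XNewRef(PyStackRef_AsPyObjectBorrow(frame->f_funcobj)),
    .start_instr = start_instr,
    .close_loop_instr = close_loop_instr,
    .jump_backward_instr = curr_instr,
};

Unmentioned members would be zero-initialized and a newly added field could not be forgotten at this init site; the trade-off is losing the asserts currently interleaved between the individual assignments.
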
Python/optimizer_analysis.c
index ad5f1b0388d5a761b4bb5e9bc50cefa7377d25ef..a24906cfdce570b9364fe2e125c4a1e9398a3314 100644 (file)
@@ -511,7 +511,7 @@ remove_unneeded_uops(_PyUOpInstruction *buffer, int buffer_size)
 //  > 0 - length of optimized trace
 int
 _Py_uop_analyze_and_optimize(
-    PyFunctionObject *initial_func,
+    PyFunctionObject *func,
     _PyUOpInstruction *buffer,
     int length,
     int curr_stacklen,
@@ -521,7 +521,7 @@ _Py_uop_analyze_and_optimize(
     OPT_STAT_INC(optimizer_attempts);
 
     length = optimize_uops(
-         initial_func, buffer,
+         func, buffer,
          length, curr_stacklen, dependencies);
 
     if (length == 0) {
Python/pystate.c
index 0676a86965cf2918e1af89c2e6891fffbc530635..36d62ecae2ee0bb5812863588cf53de0245d7175 100644 (file)
@@ -547,10 +547,6 @@ init_interpreter(PyInterpreterState *interp,
 
 #ifdef _Py_TIER2
     interp->jit_state.code_buffer = NULL;
-    interp->jit_state.initial_stack_depth = -1;
-    interp->jit_state.initial_chain_depth = -1;
-    interp->jit_state.initial_code = NULL;
-    interp->jit_state.initial_func = NULL;
 #endif
     llist_init(&interp->mem_free_queue.head);
     llist_init(&interp->asyncio_tasks_head);
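
This last hunk only removes the old flat defaults from init_interpreter; no replacement assignments appear here, presumably because _PyJit_TryInitializeTracing (above) writes every initial_state member before tracing begins. Purely to map the removed lines onto the new layout, their equivalents would read as follows; this is for reference only and is not part of the patch.

/* Not added by the patch: the removed startup defaults, expressed against
 * the new nested layout. */
interp->jit_state.initial_state.stack_depth = -1;
interp->jit_state.initial_state.chain_depth = -1;
interp->jit_state.initial_state.code = NULL;
interp->jit_state.initial_state.func = NULL;
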