From 4f29dd31e0a9d3353919b54aac2bb6002b9511a6 Mon Sep 17 00:00:00 2001 From: Ken Jin <28750310+Fidget-Spinner@users.noreply.github.com> Date: Fri, 7 Nov 2025 20:17:32 +0000 Subject: [PATCH] Partially address review --- Include/internal/pycore_interp_structs.h | 5 +- Include/internal/pycore_optimizer.h | 11 ++-- Python/bytecodes.c | 21 ++++---- Python/ceval.c | 6 ++- Python/executor_cases.c.h | 14 +++--- Python/generated_cases.c.h | 4 +- Python/optimizer.c | 64 +++++++++++++----------- Python/optimizer_analysis.c | 1 + Python/optimizer_bytecodes.c | 4 ++ Python/optimizer_cases.c.h | 1 + 10 files changed, 69 insertions(+), 62 deletions(-) diff --git a/Include/internal/pycore_interp_structs.h b/Include/internal/pycore_interp_structs.h index e37cecdd1d54..b0f5d0abc3fc 100644 --- a/Include/internal/pycore_interp_structs.h +++ b/Include/internal/pycore_interp_structs.h @@ -768,15 +768,14 @@ typedef struct _PyJitTracerState { int prev_instr_stacklevel; int specialize_counter; _PyUOpInstruction *code_buffer; - _Py_CODEUNIT *insert_exec_instr; + _Py_CODEUNIT *start_instr; _Py_CODEUNIT *close_loop_instr; + _Py_CODEUNIT *jump_backward_instr; PyCodeObject *initial_code; // Strong PyFunctionObject *initial_func; // Strong _Py_CODEUNIT *prev_instr; PyCodeObject *prev_instr_code; // Strong struct _PyExitData *prev_exit; - struct _PyExecutorObject *prev_executor; // Strong - _Py_CODEUNIT *jump_backward_instr; _PyInterpreterFrame *prev_instr_frame; _PyBloomFilter dependencies; } _PyJitTracerState; diff --git a/Include/internal/pycore_optimizer.h b/Include/internal/pycore_optimizer.h index a1d95c7c2f4a..28c6c3ecaea1 100644 --- a/Include/internal/pycore_optimizer.h +++ b/Include/internal/pycore_optimizer.h @@ -36,9 +36,9 @@ typedef struct { typedef struct _PyExitData { uint32_t target; - uint16_t index; - char is_dynamic:4; - char is_control_flow:4; + uint16_t index:14; + uint16_t is_dynamic:1; + uint16_t is_control_flow:1; _Py_BackoffCounter temperature; struct _PyExecutorObject 
*executor; } _PyExitData; @@ -351,6 +351,7 @@ static inline int is_terminator(const _PyUOpInstruction *uop) int opcode = uop->opcode; return ( opcode == _EXIT_TRACE || + opcode == _DEOPT || opcode == _JUMP_TO_TOP || opcode == _DYNAMIC_EXIT ); @@ -367,9 +368,9 @@ int _PyJit_translate_single_bytecode_to_trace(PyThreadState *tstate, _PyInterpre int _PyJit_TryInitializeTracing(PyThreadState *tstate, _PyInterpreterFrame *frame, - _Py_CODEUNIT *curr_instr, _Py_CODEUNIT *insert_exec_instr, + _Py_CODEUNIT *curr_instr, _Py_CODEUNIT *start_instr, _Py_CODEUNIT *close_loop_instr, int curr_stackdepth, int chain_depth, _PyExitData *exit, - _PyExecutorObject *prev_exec, int oparg); + int oparg); void _PyJit_FinalizeTracing(PyThreadState *tstate); diff --git a/Python/bytecodes.c b/Python/bytecodes.c index 2e720d3fb300..dfc7e2952420 100644 --- a/Python/bytecodes.c +++ b/Python/bytecodes.c @@ -1220,7 +1220,7 @@ dummy_func( PyObject *result = PyStackRef_AsPyObjectSteal(retval); if (IS_JIT_TRACING()) { #if _Py_TIER2 - _PyJit_translate_single_bytecode_to_trace(tstate, frame, next_instr); + _PyJit_translate_single_bytecode_to_trace(tstate, frame, NULL); LEAVE_TRACING(); int err = bail_tracing_and_jit(tstate, frame); if (err < 0) { @@ -2983,7 +2983,7 @@ dummy_func( oparg >>= 8; insert_exec_at--; } - int succ = _PyJit_TryInitializeTracing(tstate, frame, this_instr, insert_exec_at, next_instr, STACK_LEVEL(), 0, NULL, NULL, oparg); + int succ = _PyJit_TryInitializeTracing(tstate, frame, this_instr, insert_exec_at, next_instr, STACK_LEVEL(), 0, NULL, oparg); if (succ) { ENTER_TRACING(); } @@ -5268,7 +5268,7 @@ dummy_func( tier2 op(_EXIT_TRACE, (exit_p/4 --)) { _PyExitData *exit = (_PyExitData *)exit_p; #if defined(Py_DEBUG) && !defined(_Py_JIT) - const _Py_CODEUNIT *target = ((frame->owner >= FRAME_OWNED_BY_INTERPRETER) + const _Py_CODEUNIT *target = ((frame->owner == FRAME_OWNED_BY_INTERPRETER) ? 
_Py_INTERPRETER_TRAMPOLINE_INSTRUCTIONS_PTR : _PyFrame_GetBytecode(frame)) + exit->target; OPT_HIST(trace_uop_execution_counter, trace_run_length_hist); @@ -5285,8 +5285,6 @@ dummy_func( TIER2_TO_TIER2(exit->executor); } - // Note: this is different than _COLD_EXIT/_EXIT_TRACE, as it may lead to multiple executors - // from a single exit! tier2 op(_DYNAMIC_EXIT, (exit_p/4 --)) { #if defined(Py_DEBUG) && !defined(_Py_JIT) _PyExitData *exit = (_PyExitData *)exit_p; @@ -5415,7 +5413,8 @@ dummy_func( } tier2 op(_DEOPT, (--)) { - GOTO_TIER_ONE(_PyFrame_GetBytecode(frame) + CURRENT_TARGET()); + GOTO_TIER_ONE((frame->owner == FRAME_OWNED_BY_INTERPRETER) + ? _Py_INTERPRETER_TRAMPOLINE_INSTRUCTIONS_PTR : _PyFrame_GetBytecode(frame) + CURRENT_TARGET()); } tier2 op(_HANDLE_PENDING_AND_DEOPT, (--)) { @@ -5445,8 +5444,8 @@ dummy_func( tier2 op(_COLD_EXIT, ( -- )) { _PyExitData *exit = tstate->jit_exit; assert(exit != NULL); - _Py_CODEUNIT *target = ((frame->owner >= FRAME_OWNED_BY_INTERPRETER) - ? (_Py_CODEUNIT *)_Py_INTERPRETER_TRAMPOLINE_INSTRUCTIONS_PTR : _PyFrame_GetBytecode(frame)) + exit->target; + assert(frame->owner < FRAME_OWNED_BY_INTERPRETER); + _Py_CODEUNIT *target = _PyFrame_GetBytecode(frame) + exit->target; _Py_BackoffCounter temperature = exit->temperature; _PyExecutorObject *executor; if (target->op.code == ENTER_EXECUTOR) { @@ -5458,9 +5457,6 @@ dummy_func( TIER2_TO_TIER2(exit->executor); } else { - if (frame->owner >= FRAME_OWNED_BY_INTERPRETER) { - GOTO_TIER_ONE(target); - } if (!backoff_counter_triggers(temperature)) { exit->temperature = advance_backoff_counter(temperature); GOTO_TIER_ONE(target); @@ -5473,7 +5469,7 @@ dummy_func( // Note: it's safe to use target->op.arg here instead of the oparg given by EXTENDED_ARG. // The invariant in the optimizer is the deopt target always points back to the first EXTENDED_ARG. // So setting it to anything else is wrong. 
- int succ = _PyJit_TryInitializeTracing(tstate, frame, target, target, target, STACK_LEVEL(), chain_depth, exit, previous_executor, target->op.arg); + int succ = _PyJit_TryInitializeTracing(tstate, frame, target, target, target, STACK_LEVEL(), chain_depth, exit, target->op.arg); exit->temperature = restart_backoff_counter(exit->temperature); if (succ) { GOTO_TIER_ONE_CONTINUE_TRACING(target); @@ -5483,6 +5479,7 @@ dummy_func( } tier2 op(_COLD_DYNAMIC_EXIT, ( -- )) { + // TODO (gh-139109): This should be similar to _COLD_EXIT in the future. _Py_CODEUNIT *target = frame->instr_ptr; GOTO_TIER_ONE(target); } diff --git a/Python/ceval.c b/Python/ceval.c index 074b0f8b86e9..00c30cc3fd4b 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -1105,9 +1105,11 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, _PyInterpreterFrame *frame, int _Py_CODEUNIT *next_instr; _PyStackRef *stack_pointer; entry.stack[0] = PyStackRef_NULL; - entry.frame.f_funcobj = PyStackRef_NULL; -#if defined(Py_DEBUG) +#ifdef Py_STACKREF_DEBUG + entry.frame.f_funcobj = PyStackRef_None; +#elif defined(Py_DEBUG) /* Set these to invalid but identifiable values for debugging. */ + entry.frame.f_funcobj = (_PyStackRef){.bits = 0xaaa0}; entry.frame.f_locals = (PyObject*)0xaaa1; entry.frame.frame_obj = (PyFrameObject*)0xaaa2; entry.frame.f_globals = (PyObject*)0xaaa3; diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h index 1e5230bad976..16db3e2efcfc 100644 --- a/Python/executor_cases.c.h +++ b/Python/executor_cases.c.h @@ -7111,7 +7111,7 @@ PyObject *exit_p = (PyObject *)CURRENT_OPERAND0(); _PyExitData *exit = (_PyExitData *)exit_p; #if defined(Py_DEBUG) && !defined(_Py_JIT) - const _Py_CODEUNIT *target = ((frame->owner >= FRAME_OWNED_BY_INTERPRETER) + const _Py_CODEUNIT *target = ((frame->owner == FRAME_OWNED_BY_INTERPRETER) ? 
_Py_INTERPRETER_TRAMPOLINE_INSTRUCTIONS_PTR : _PyFrame_GetBytecode(frame)) + exit->target; OPT_HIST(trace_uop_execution_counter, trace_run_length_hist); @@ -7446,7 +7446,8 @@ } case _DEOPT: { - GOTO_TIER_ONE(_PyFrame_GetBytecode(frame) + CURRENT_TARGET()); + GOTO_TIER_ONE((frame->owner == FRAME_OWNED_BY_INTERPRETER) + ? _Py_INTERPRETER_TRAMPOLINE_INSTRUCTIONS_PTR : _PyFrame_GetBytecode(frame) + CURRENT_TARGET()); break; } @@ -7487,8 +7488,8 @@ case _COLD_EXIT: { _PyExitData *exit = tstate->jit_exit; assert(exit != NULL); - _Py_CODEUNIT *target = ((frame->owner >= FRAME_OWNED_BY_INTERPRETER) - ? (_Py_CODEUNIT *)_Py_INTERPRETER_TRAMPOLINE_INSTRUCTIONS_PTR : _PyFrame_GetBytecode(frame)) + exit->target; + assert(frame->owner < FRAME_OWNED_BY_INTERPRETER); + _Py_CODEUNIT *target = _PyFrame_GetBytecode(frame) + exit->target; _Py_BackoffCounter temperature = exit->temperature; _PyExecutorObject *executor; if (target->op.code == ENTER_EXECUTOR) { @@ -7500,9 +7501,6 @@ TIER2_TO_TIER2(exit->executor); } else { - if (frame->owner >= FRAME_OWNED_BY_INTERPRETER) { - GOTO_TIER_ONE(target); - } if (!backoff_counter_triggers(temperature)) { exit->temperature = advance_backoff_counter(temperature); GOTO_TIER_ONE(target); @@ -7510,7 +7508,7 @@ _PyExecutorObject *previous_executor = _PyExecutor_FromExit(exit); assert(tstate->current_executor == (PyObject *)previous_executor); int chain_depth = previous_executor->vm_data.chain_depth + !exit->is_control_flow; - int succ = _PyJit_TryInitializeTracing(tstate, frame, target, target, target, STACK_LEVEL(), chain_depth, exit, previous_executor, target->op.arg); + int succ = _PyJit_TryInitializeTracing(tstate, frame, target, target, target, STACK_LEVEL(), chain_depth, exit, target->op.arg); exit->temperature = restart_backoff_counter(exit->temperature); if (succ) { GOTO_TIER_ONE_CONTINUE_TRACING(target); diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h index e500ec93fff4..33ac4152bd5e 100644 --- 
a/Python/generated_cases.c.h +++ b/Python/generated_cases.c.h @@ -7557,7 +7557,7 @@ stack_pointer += -1; assert(WITHIN_STACK_BOUNDS()); _PyFrame_SetStackPointer(frame, stack_pointer); - _PyJit_translate_single_bytecode_to_trace(tstate, frame, next_instr); + _PyJit_translate_single_bytecode_to_trace(tstate, frame, NULL); stack_pointer = _PyFrame_GetStackPointer(frame); LEAVE_TRACING(); _PyFrame_SetStackPointer(frame, stack_pointer); @@ -7716,7 +7716,7 @@ oparg >>= 8; insert_exec_at--; } - int succ = _PyJit_TryInitializeTracing(tstate, frame, this_instr, insert_exec_at, next_instr, STACK_LEVEL(), 0, NULL, NULL, oparg); + int succ = _PyJit_TryInitializeTracing(tstate, frame, this_instr, insert_exec_at, next_instr, STACK_LEVEL(), 0, NULL, oparg); if (succ) { ENTER_TRACING(); } diff --git a/Python/optimizer.c b/Python/optimizer.c index 845ed3d86c09..19d86115154e 100644 --- a/Python/optimizer.c +++ b/Python/optimizer.c @@ -136,7 +136,7 @@ _PyOptimizer_Optimize( chain_depth %= MAX_CHAIN_DEPTH; bool progress_needed = chain_depth == 0; PyCodeObject *code = (PyCodeObject *)tstate->interp->jit_state.initial_code; - _Py_CODEUNIT *start = tstate->interp->jit_state.insert_exec_instr; + _Py_CODEUNIT *start = tstate->interp->jit_state.start_instr; if (progress_needed && !has_space_for_executor(code, start)) { interp->compiling = false; return 0; @@ -608,10 +608,6 @@ _PyJit_translate_single_bytecode_to_trace( // Strange control-flow bool has_dynamic_jump_taken = OPCODE_HAS_UNPREDICTABLE_JUMP(opcode) && (next_instr != this_instr + 1 + _PyOpcode_Caches[_PyOpcode_Deopt[opcode]]); - if (has_dynamic_jump_taken) { - DPRINTF(2, "Unsupported: dynamic jump taken\n"); - goto unsupported; - } /* Special case the first instruction, * so that we can guarantee forward progress */ @@ -624,6 +620,10 @@ _PyJit_translate_single_bytecode_to_trace( } bool needs_guard_ip = _PyOpcode_NeedsGuardIp[opcode]; + if (has_dynamic_jump_taken && !needs_guard_ip) { + DPRINTF(2, "Unsupported: dynamic jump 
taken\n"); + goto unsupported; + } DPRINTF(2, "%p %d: %s(%d) %d %d\n", old_code, target, _PyOpcode_OpName[opcode], oparg, needs_guard_ip, old_stack_level); #ifdef Py_DEBUG @@ -749,7 +749,7 @@ _PyJit_translate_single_bytecode_to_trace( case JUMP_BACKWARD_NO_INTERRUPT: { if ((next_instr != tstate->interp->jit_state.close_loop_instr) && - (next_instr != tstate->interp->jit_state.insert_exec_instr) && + (next_instr != tstate->interp->jit_state.start_instr) && tstate->interp->jit_state.code_curr_size > 5 && // These are coroutines, and we want to unroll those usually. opcode != JUMP_BACKWARD_NO_INTERRUPT) { @@ -760,7 +760,7 @@ _PyJit_translate_single_bytecode_to_trace( OPT_STAT_INC(inner_loop); ADD_TO_TRACE(_EXIT_TRACE, 0, 0, target); trace[trace_length-1].operand1 = true; // is_control_flow - DPRINTF(2, "JUMP_BACKWARD not to top ends trace %p %p %p\n", next_instr, tstate->interp->jit_state.close_loop_instr, tstate->interp->jit_state.insert_exec_instr); + DPRINTF(2, "JUMP_BACKWARD not to top ends trace %p %p %p\n", next_instr, tstate->interp->jit_state.close_loop_instr, tstate->interp->jit_state.start_instr); goto done; } break; @@ -772,7 +772,9 @@ _PyJit_translate_single_bytecode_to_trace( * start with RESUME_CHECK */ ADD_TO_TRACE(_TIER2_RESUME_CHECK, 0, 0, target); break; - + case INTERPRETER_EXIT: + ADD_TO_TRACE(_DEOPT, 0, 0, target); + goto done; default: { const struct opcode_macro_expansion *expansion = &_PyOpcode_macro_expansion[opcode]; @@ -862,18 +864,18 @@ _PyJit_translate_single_bytecode_to_trace( PyCodeObject *new_code = (PyCodeObject *)PyStackRef_AsPyObjectBorrow(frame->f_executable); PyFunctionObject *new_func = (PyFunctionObject *)PyStackRef_AsPyObjectBorrow(frame->f_funcobj); - if (new_func != NULL) { - operand = (uintptr_t)new_func; - DPRINTF(2, "Adding %p func to op\n", (void *)operand); - _Py_BloomFilter_Add(dependencies, new_func); - } - else if (new_code != NULL && !Py_IsNone((PyObject*)new_code)) { - operand = (uintptr_t)new_code | 1; - DPRINTF(2, 
"Adding %p code to op\n", (void *)operand); - _Py_BloomFilter_Add(dependencies, new_code); - } - else { - operand = 0; + operand = 0; + if (frame->owner < FRAME_OWNED_BY_INTERPRETER) { + if (new_func != NULL) { + operand = (uintptr_t)new_func; + DPRINTF(2, "Adding %p func to op\n", (void *)operand); + _Py_BloomFilter_Add(dependencies, new_func); + } + else if (new_code != NULL && !Py_IsNone((PyObject*)new_code)) { + operand = (uintptr_t)new_code | 1; + DPRINTF(2, "Adding %p code to op\n", (void *)operand); + _Py_BloomFilter_Add(dependencies, new_code); + } } ADD_TO_TRACE(uop, oparg, operand, target); trace[trace_length - 1].operand1 = PyStackRef_IsNone(frame->f_executable) ? 2 : ((int)(frame->stackpointer - _PyFrame_Stackbase(frame))); @@ -913,8 +915,11 @@ _PyJit_translate_single_bytecode_to_trace( } } // Loop back to the start - int is_first_instr = tstate->interp->jit_state.close_loop_instr == next_instr || tstate->interp->jit_state.insert_exec_instr == next_instr; + int is_first_instr = tstate->interp->jit_state.close_loop_instr == next_instr || tstate->interp->jit_state.start_instr == next_instr; if (is_first_instr && tstate->interp->jit_state.code_curr_size > 5) { + if (needs_guard_ip) { + ADD_TO_TRACE(_SET_IP, 0, (uintptr_t)next_instr, 0); + } ADD_TO_TRACE(_JUMP_TO_TOP, 0, 0, 0); goto done; } @@ -945,7 +950,10 @@ full: // Returns 0 for do not enter tracing, 1 on enter tracing. int -_PyJit_TryInitializeTracing(PyThreadState *tstate, _PyInterpreterFrame *frame, _Py_CODEUNIT *curr_instr, _Py_CODEUNIT *insert_exec_instr, _Py_CODEUNIT *close_loop_instr, int curr_stackdepth, int chain_depth, _PyExitData *exit, _PyExecutorObject *prev_exec, int oparg) +_PyJit_TryInitializeTracing( + PyThreadState *tstate, _PyInterpreterFrame *frame, _Py_CODEUNIT *curr_instr, + _Py_CODEUNIT *start_instr, _Py_CODEUNIT *close_loop_instr, int curr_stackdepth, int chain_depth, + _PyExitData *exit, int oparg) { // A recursive trace. 
// Don't trace into the inner call because it will stomp on the previous trace, causing endless retraces. @@ -972,12 +980,12 @@ _PyJit_TryInitializeTracing(PyThreadState *tstate, _PyInterpreterFrame *frame, _ chain_depth); #endif - add_to_trace(tstate->interp->jit_state.code_buffer, 0, _START_EXECUTOR, 0, (uintptr_t)insert_exec_instr, INSTR_IP(insert_exec_instr, code)); + add_to_trace(tstate->interp->jit_state.code_buffer, 0, _START_EXECUTOR, 0, (uintptr_t)start_instr, INSTR_IP(start_instr, code)); add_to_trace(tstate->interp->jit_state.code_buffer, 1, _MAKE_WARM, 0, 0, 0); tstate->interp->jit_state.code_curr_size = 2; tstate->interp->jit_state.code_max_size = UOP_MAX_TRACE_LENGTH; - tstate->interp->jit_state.insert_exec_instr = insert_exec_instr; + tstate->interp->jit_state.start_instr = start_instr; tstate->interp->jit_state.close_loop_instr = close_loop_instr; tstate->interp->jit_state.initial_code = (PyCodeObject *)Py_NewRef(code); tstate->interp->jit_state.initial_func = (PyFunctionObject *)Py_XNewRef(PyStackRef_AsPyObjectBorrow(frame->f_funcobj)); @@ -993,9 +1001,9 @@ _PyJit_TryInitializeTracing(PyThreadState *tstate, _PyInterpreterFrame *frame, _ tstate->interp->jit_state.prev_instr_oparg = oparg; tstate->interp->jit_state.prev_instr_stacklevel = curr_stackdepth; tstate->interp->jit_state.prev_instr_is_super = false; - assert(curr_instr->op.code == JUMP_BACKWARD_JIT || (prev_exec != NULL && exit != NULL)); + assert(curr_instr->op.code == JUMP_BACKWARD_JIT || (exit != NULL)); tstate->interp->jit_state.jump_backward_instr = curr_instr; - tstate->interp->jit_state.prev_executor = (_PyExecutorObject *)Py_XNewRef(prev_exec); + // prev_executor is no longer cached on the tracer state. _Py_BloomFilter_Init(&tstate->interp->jit_state.dependencies); return 1; } @@ -1006,7 +1014,6 @@ _PyJit_FinalizeTracing(PyThreadState *tstate) Py_CLEAR(tstate->interp->jit_state.initial_code); Py_CLEAR(tstate->interp->jit_state.initial_func); 
Py_CLEAR(tstate->interp->jit_state.prev_instr_code); - Py_CLEAR(tstate->interp->jit_state.prev_executor); tstate->interp->jit_state.code_curr_size = 2; tstate->interp->jit_state.code_max_size = UOP_MAX_TRACE_LENGTH - 1; } @@ -1077,9 +1084,6 @@ prepare_for_execution(_PyUOpInstruction *buffer, int length) for (int i = 0; i < length; i++) { _PyUOpInstruction *inst = &buffer[i]; int opcode = inst->opcode; - if (inst->format != UOP_FORMAT_TARGET) { - fprintf(stdout, "I: %d\n", i); - } int32_t target = (int32_t)uop_get_target(inst); uint16_t exit_flags = _PyUop_Flags[opcode] & (HAS_EXIT_FLAG | HAS_DEOPT_FLAG | HAS_PERIODIC_FLAG); if (exit_flags) { diff --git a/Python/optimizer_analysis.c b/Python/optimizer_analysis.c index e1a6f29f47d1..ad5f1b0388d5 100644 --- a/Python/optimizer_analysis.c +++ b/Python/optimizer_analysis.c @@ -499,6 +499,7 @@ remove_unneeded_uops(_PyUOpInstruction *buffer, int buffer_size) } case _JUMP_TO_TOP: case _DYNAMIC_EXIT: + case _DEOPT: return pc + 1; } } diff --git a/Python/optimizer_bytecodes.c b/Python/optimizer_bytecodes.c index 98dabe989aed..fdc4e49aaf44 100644 --- a/Python/optimizer_bytecodes.c +++ b/Python/optimizer_bytecodes.c @@ -1022,6 +1022,10 @@ dummy_func(void) { ctx->done = true; } + op(_DEOPT, (--)) { + ctx->done = true; + } + op(_REPLACE_WITH_TRUE, (value -- res)) { REPLACE_OP(this_instr, _POP_TOP_LOAD_CONST_INLINE_BORROW, 0, (uintptr_t)Py_True); res = sym_new_const(ctx, Py_True); diff --git a/Python/optimizer_cases.c.h b/Python/optimizer_cases.c.h index c12f9af02f87..397502cec784 100644 --- a/Python/optimizer_cases.c.h +++ b/Python/optimizer_cases.c.h @@ -3412,6 +3412,7 @@ } case _DEOPT: { + ctx->done = true; break; } -- 2.47.3