From: Ken Jin <28750310+Fidget-Spinner@users.noreply.github.com> Date: Mon, 10 Nov 2025 23:23:50 +0000 (+0000) Subject: Remove specialize_counter X-Git-Url: http://git.ipfire.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=08ec6005efa1120d1a9bcd6b92e540d47fb4c1fe;p=thirdparty%2FPython%2Fcpython.git Remove specialize_counter. Instead of counting specialization attempts per thread (specialize_counter, capped at MAX_SPECIALIZATION_TRIES) and forcing re-specialization during tracing via IS_JIT_TRACING_MAKING_PROGRESS(), stamp trigger_backoff_counter() into an instruction's adaptive counter when tracing starts or resumes, so the next execution of a specializable instruction re-specializes on its own. --- diff --git a/Include/internal/pycore_backoff.h b/Include/internal/pycore_backoff.h index 3762257f3173..71066f1bd9f1 100644 --- a/Include/internal/pycore_backoff.h +++ b/Include/internal/pycore_backoff.h @@ -95,6 +95,14 @@ backoff_counter_triggers(_Py_BackoffCounter counter) return counter.value_and_backoff < UNREACHABLE_BACKOFF; } +static inline _Py_BackoffCounter +trigger_backoff_counter(void) +{ + _Py_BackoffCounter result; + result.value_and_backoff = 0; + return result; +} + // Initial JUMP_BACKWARD counter. // Must be larger than ADAPTIVE_COOLDOWN_VALUE, otherwise when JIT code is // invalidated we may construct a new trace before the bytecode has properly @@ -134,14 +142,6 @@ initial_unreachable_backoff_counter(void) return make_backoff_counter(0, UNREACHABLE_BACKOFF); } -// Required to not get stuck in infinite specialization loops due to specialization failure. -// We use 2 here as there are a few scenarios: -// 1. Freshly specialized from unspecialized, in which case the counter will be 1. -// 2. Re-specialized from deopt, in which case the counter will be 1. -// 3. Deopt -> Specialize -> Deopt -> Specialize, in which case the counter will be 2. -// We do not want the 3rd case. -#define MAX_SPECIALIZATION_TRIES 2 - #ifdef __cplusplus } #endif diff --git a/Include/internal/pycore_tstate.h b/Include/internal/pycore_tstate.h index 04041b273d75..d2407df43c1a 100644 --- a/Include/internal/pycore_tstate.h +++ b/Include/internal/pycore_tstate.h @@ -41,7 +41,6 @@ typedef struct _PyJitTracerPreviousState { int code_curr_size; int instr_oparg; int instr_stacklevel; - int specialize_counter; _Py_CODEUNIT *instr; PyCodeObject *instr_code; // Strong struct _PyInterpreterFrame *instr_frame; diff --git a/Python/bytecodes.c b/Python/bytecodes.c index 0469e0c96080..c2d4725ccd45 100644 --- a/Python/bytecodes.c +++ b/Python/bytecodes.c @@ -5249,7 +5249,7 @@ dummy_func( ? _Py_INTERPRETER_TRAMPOLINE_INSTRUCTIONS_PTR : _PyFrame_GetBytecode(frame)) + exit->target; OPT_HIST(trace_uop_execution_counter, trace_run_length_hist); - if (frame->lltrace >= 2) { + if (frame->lltrace >= 3) { printf("SIDE EXIT: [UOp "); _PyUOpPrint(&next_uop[-1]); printf(", exit %tu, temp %d, target %d -> %s, is_control_flow %d]\n", @@ -5267,7 +5267,7 @@ dummy_func( _PyExitData *exit = (_PyExitData *)exit_p; _Py_CODEUNIT *target = frame->instr_ptr; OPT_HIST(trace_uop_execution_counter, trace_run_length_hist); - if (frame->lltrace >= 2) { + if (frame->lltrace >= 3) { printf("DYNAMIC EXIT: [UOp "); _PyUOpPrint(&next_uop[-1]); printf(", exit %tu, temp %d, target %d -> %s]\n", @@ -5663,7 +5663,6 @@ dummy_func( else { _tstate->jit_state.prev_state.instr = next_instr; } - _tstate->jit_state.prev_state.specialize_counter = 0; PyObject *prev_code = PyStackRef_AsPyObjectBorrow(frame->f_executable); if (_tstate->jit_state.prev_state.instr_code != (PyCodeObject *)prev_code) { Py_SETREF(_tstate->jit_state.prev_state.instr_code, (PyCodeObject*)Py_NewRef((prev_code))); @@ -5672,6 +5671,9 @@ dummy_func( _tstate->jit_state.prev_state.instr_frame = frame; _tstate->jit_state.prev_state.instr_oparg = oparg; _tstate->jit_state.prev_state.instr_stacklevel = PyStackRef_IsNone(frame->f_executable) ? 
2 : STACK_LEVEL(); + if (_PyOpcode_Caches[_PyOpcode_Deopt[opcode]]) { + (&next_instr[1])->counter = trigger_backoff_counter(); + } DISPATCH_GOTO_NON_TRACING(); #else Py_FatalError("JIT label executed in non-jit build."); diff --git a/Python/ceval_macros.h b/Python/ceval_macros.h index c1062e9d091d..a812c234cb2e 100644 --- a/Python/ceval_macros.h +++ b/Python/ceval_macros.h @@ -134,14 +134,12 @@ #if (_Py_TAIL_CALL_INTERP || USE_COMPUTED_GOTOS) && _Py_TIER2 # define IS_JIT_TRACING() (DISPATCH_TABLE_VAR == TRACING_DISPATCH_TABLE) -# define IS_JIT_TRACING_MAKING_PROGRESS() (IS_JIT_TRACING() && ((_PyThreadStateImpl *)tstate)->jit_state.prev_state.specialize_counter < MAX_SPECIALIZATION_TRIES) # define ENTER_TRACING() \ DISPATCH_TABLE_VAR = TRACING_DISPATCH_TABLE; # define LEAVE_TRACING() \ DISPATCH_TABLE_VAR = DISPATCH_TABLE; #else # define IS_JIT_TRACING() (0) -# define IS_JIT_TRACING_MAKING_PROGRESS() (0) # define ENTER_TRACING() # define LEAVE_TRACING() #endif @@ -308,7 +306,6 @@ GETITEM(PyObject *v, Py_ssize_t i) { * which is always an integral type. */ -// Force re-specialization when tracing a side exit to get good side exits. #define ADAPTIVE_COUNTER_TRIGGERS(COUNTER) \ - backoff_counter_triggers(forge_backoff_counter((COUNTER))) || IS_JIT_TRACING_MAKING_PROGRESS() + backoff_counter_triggers(forge_backoff_counter((COUNTER))) #define ADVANCE_ADAPTIVE_COUNTER(COUNTER) \ do { \ diff --git a/Python/executor_cases.c.h b/Python/executor_cases.c.h index 84ce8f862a9d..7ba2e9d0d929 100644 --- a/Python/executor_cases.c.h +++ b/Python/executor_cases.c.h @@ -7114,7 +7114,7 @@ ? _Py_INTERPRETER_TRAMPOLINE_INSTRUCTIONS_PTR : _PyFrame_GetBytecode(frame)) + exit->target; OPT_HIST(trace_uop_execution_counter, trace_run_length_hist); - if (frame->lltrace >= 2) { + if (frame->lltrace >= 3) { _PyFrame_SetStackPointer(frame, stack_pointer); printf("SIDE EXIT: [UOp "); _PyUOpPrint(&next_uop[-1]); @@ -7136,7 +7136,7 @@ _PyExitData *exit = (_PyExitData *)exit_p; _Py_CODEUNIT *target = frame->instr_ptr; OPT_HIST(trace_uop_execution_counter, trace_run_length_hist); - if (frame->lltrace >= 2) { + if (frame->lltrace >= 3) { _PyFrame_SetStackPointer(frame, stack_pointer); printf("DYNAMIC EXIT: [UOp "); _PyUOpPrint(&next_uop[-1]); diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h index f2f562a134cf..241d2fd71e66 100644 --- a/Python/generated_cases.c.h +++ b/Python/generated_cases.c.h @@ -44,9 +44,6 @@ _PyFrame_SetStackPointer(frame, stack_pointer); _Py_Specialize_BinaryOp(lhs, rhs, next_instr, oparg, LOCALS_ARRAY); stack_pointer = _PyFrame_GetStackPointer(frame); - #if _Py_TIER2 - ((_PyThreadStateImpl *)tstate)->jit_state.prev_state.specialize_counter++; - #endif DISPATCH_SAME_OPARG(); } OPCODE_DEFERRED_INC(BINARY_OP); @@ -1538,9 +1535,6 @@ _PyFrame_SetStackPointer(frame, stack_pointer); _Py_Specialize_Call(callable, next_instr, oparg + !PyStackRef_IsNull(self_or_null)); stack_pointer = _PyFrame_GetStackPointer(frame); - #if _Py_TIER2 - ((_PyThreadStateImpl *)tstate)->jit_state.prev_state.specialize_counter++; - #endif DISPATCH_SAME_OPARG(); } OPCODE_DEFERRED_INC(CALL); @@ -2831,9 +2825,6 @@ _PyFrame_SetStackPointer(frame, stack_pointer); _Py_Specialize_CallKw(callable, next_instr, oparg + !PyStackRef_IsNull(self_or_null)); stack_pointer = _PyFrame_GetStackPointer(frame); - #if _Py_TIER2 - ((_PyThreadStateImpl *)tstate)->jit_state.prev_state.specialize_counter++; - #endif DISPATCH_SAME_OPARG(); } OPCODE_DEFERRED_INC(CALL_KW); @@ -4677,9 +4668,6 @@ _PyFrame_SetStackPointer(frame, stack_pointer); 
_Py_Specialize_CompareOp(left, right, next_instr, oparg); stack_pointer = _PyFrame_GetStackPointer(frame); - #if _Py_TIER2 - ((_PyThreadStateImpl *)tstate)->jit_state.prev_state.specialize_counter++; - #endif DISPATCH_SAME_OPARG(); } OPCODE_DEFERRED_INC(COMPARE_OP); @@ -4925,9 +4913,6 @@ _PyFrame_SetStackPointer(frame, stack_pointer); _Py_Specialize_ContainsOp(right, next_instr); stack_pointer = _PyFrame_GetStackPointer(frame); - #if _Py_TIER2 - ((_PyThreadStateImpl *)tstate)->jit_state.prev_state.specialize_counter++; - #endif DISPATCH_SAME_OPARG(); } OPCODE_DEFERRED_INC(CONTAINS_OP); @@ -5656,9 +5641,6 @@ _PyFrame_SetStackPointer(frame, stack_pointer); _Py_Specialize_ForIter(iter, null_or_index, next_instr, oparg); stack_pointer = _PyFrame_GetStackPointer(frame); - #if _Py_TIER2 - ((_PyThreadStateImpl *)tstate)->jit_state.prev_state.specialize_counter++; - #endif DISPATCH_SAME_OPARG(); } OPCODE_DEFERRED_INC(FOR_ITER); @@ -7616,9 +7598,6 @@ uint8_t desired = tstate->interp->jit ? JUMP_BACKWARD_JIT : JUMP_BACKWARD_NO_JIT; FT_ATOMIC_STORE_UINT8_RELAXED(this_instr->op.code, desired); next_instr = this_instr; - #if _Py_TIER2 - ((_PyThreadStateImpl *)tstate)->jit_state.prev_state.specialize_counter++; - #endif DISPATCH_SAME_OPARG(); } #endif @@ -7841,9 +7820,6 @@ _PyFrame_SetStackPointer(frame, stack_pointer); _Py_Specialize_LoadAttr(owner, next_instr, name); stack_pointer = _PyFrame_GetStackPointer(frame); - #if _Py_TIER2 - ((_PyThreadStateImpl *)tstate)->jit_state.prev_state.specialize_counter++; - #endif DISPATCH_SAME_OPARG(); } OPCODE_DEFERRED_INC(LOAD_ATTR); @@ -9149,9 +9125,6 @@ _PyFrame_SetStackPointer(frame, stack_pointer); _Py_Specialize_LoadGlobal(GLOBALS(), BUILTINS(), next_instr, name); stack_pointer = _PyFrame_GetStackPointer(frame); - #if _Py_TIER2 - ((_PyThreadStateImpl *)tstate)->jit_state.prev_state.specialize_counter++; - #endif DISPATCH_SAME_OPARG(); } OPCODE_DEFERRED_INC(LOAD_GLOBAL); @@ -9472,9 +9445,6 @@ _PyFrame_SetStackPointer(frame, stack_pointer); _Py_Specialize_LoadSuperAttr(global_super_st, class_st, next_instr, load_method); stack_pointer = _PyFrame_GetStackPointer(frame); - #if _Py_TIER2 - ((_PyThreadStateImpl *)tstate)->jit_state.prev_state.specialize_counter++; - #endif DISPATCH_SAME_OPARG(); } OPCODE_DEFERRED_INC(LOAD_SUPER_ATTR); @@ -10460,9 +10430,6 @@ _PyFrame_SetStackPointer(frame, stack_pointer); _Py_Specialize_Send(receiver, next_instr); stack_pointer = _PyFrame_GetStackPointer(frame); - #if _Py_TIER2 - ((_PyThreadStateImpl *)tstate)->jit_state.prev_state.specialize_counter++; - #endif DISPATCH_SAME_OPARG(); } OPCODE_DEFERRED_INC(SEND); @@ -10762,9 +10729,6 @@ _PyFrame_SetStackPointer(frame, stack_pointer); _Py_Specialize_StoreAttr(owner, next_instr, name); stack_pointer = _PyFrame_GetStackPointer(frame); - #if _Py_TIER2 - ((_PyThreadStateImpl *)tstate)->jit_state.prev_state.specialize_counter++; - #endif DISPATCH_SAME_OPARG(); } OPCODE_DEFERRED_INC(STORE_ATTR); @@ -11262,9 +11226,6 @@ _PyFrame_SetStackPointer(frame, stack_pointer); _Py_Specialize_StoreSubscr(container, sub, next_instr); stack_pointer = _PyFrame_GetStackPointer(frame); - #if _Py_TIER2 - ((_PyThreadStateImpl *)tstate)->jit_state.prev_state.specialize_counter++; - #endif DISPATCH_SAME_OPARG(); } OPCODE_DEFERRED_INC(STORE_SUBSCR); @@ -11474,9 +11435,6 @@ _PyFrame_SetStackPointer(frame, stack_pointer); _Py_Specialize_ToBool(value, next_instr); stack_pointer = _PyFrame_GetStackPointer(frame); - #if _Py_TIER2 - ((_PyThreadStateImpl *)tstate)->jit_state.prev_state.specialize_counter++; - #endif 
DISPATCH_SAME_OPARG(); } OPCODE_DEFERRED_INC(TO_BOOL); @@ -11857,9 +11815,6 @@ _PyFrame_SetStackPointer(frame, stack_pointer); _Py_Specialize_UnpackSequence(seq, next_instr, oparg); stack_pointer = _PyFrame_GetStackPointer(frame); - #if _Py_TIER2 - ((_PyThreadStateImpl *)tstate)->jit_state.prev_state.specialize_counter++; - #endif DISPATCH_SAME_OPARG(); } OPCODE_DEFERRED_INC(UNPACK_SEQUENCE); @@ -12335,7 +12290,6 @@ JUMP_TO_LABEL(error); else { _tstate->jit_state.prev_state.instr = next_instr; } - _tstate->jit_state.prev_state.specialize_counter = 0; PyObject *prev_code = PyStackRef_AsPyObjectBorrow(frame->f_executable); if (_tstate->jit_state.prev_state.instr_code != (PyCodeObject *)prev_code) { _PyFrame_SetStackPointer(frame, stack_pointer); @@ -12345,6 +12299,9 @@ JUMP_TO_LABEL(error); _tstate->jit_state.prev_state.instr_frame = frame; _tstate->jit_state.prev_state.instr_oparg = oparg; _tstate->jit_state.prev_state.instr_stacklevel = PyStackRef_IsNone(frame->f_executable) ? 2 : STACK_LEVEL(); + if (_PyOpcode_Caches[_PyOpcode_Deopt[opcode]]) { + (&next_instr[1])->counter = trigger_backoff_counter(); + } DISPATCH_GOTO_NON_TRACING(); #else Py_FatalError("JIT label executed in non-jit build."); diff --git a/Python/optimizer.c b/Python/optimizer.c index 64e57f26094d..c4da65c19ff9 100644 --- a/Python/optimizer.c +++ b/Python/optimizer.c @@ -129,6 +129,7 @@ _PyOptimizer_Optimize( if (_tstate->jit_state.initial_state.func == NULL) { return 0; } + assert(_tstate->jit_state.initial_state.func != NULL); interp->compiling = true; // The first executor in a chain and the MAX_CHAIN_DEPTH'th executor *must* // make progress in order to avoid infinite loops or excessively-long @@ -584,10 +585,6 @@ _PyJit_translate_single_bytecode_to_trace( // We must point to the first EXTENDED_ARG when deopting. int oparg = _tstate->jit_state.prev_state.instr_oparg; int opcode = this_instr->op.code; - // Failed specialization many times. Deopt! - if (_tstate->jit_state.prev_state.specialize_counter >= MAX_SPECIALIZATION_TRIES) { - opcode = _PyOpcode_Deopt[opcode]; - } int rewind_oparg = oparg; while (rewind_oparg > 255) { rewind_oparg >>= 8; @@ -736,6 +733,7 @@ _PyJit_translate_single_bytecode_to_trace( _Py_CODEUNIT *computed_jump_instr = computed_next_instr_without_modifiers + oparg; assert(next_instr == computed_next_instr || next_instr == computed_jump_instr); int jump_happened = computed_jump_instr == next_instr; + assert(jump_happened == (target_instr[1].cache & 1)); uint32_t uopcode = BRANCH_TO_GUARD[opcode - POP_JUMP_IF_FALSE][jump_happened]; ADD_TO_TRACE(uopcode, 0, 0, INSTR_IP(jump_happened ? 
computed_next_instr : computed_jump_instr, old_code)); break; @@ -974,7 +972,10 @@ _PyJit_TryInitializeTracing( return 0; } } - + PyObject *func = PyStackRef_AsPyObjectBorrow(frame->f_funcobj); + if (func == NULL) { + return 0; + } PyCodeObject *code = _PyFrame_GetCode(frame); #ifdef Py_DEBUG char *python_lltrace = Py_GETENV("PYTHON_LLTRACE"); @@ -999,13 +1000,12 @@ _PyJit_TryInitializeTracing( _tstate->jit_state.initial_state.start_instr = start_instr; _tstate->jit_state.initial_state.close_loop_instr = close_loop_instr; _tstate->jit_state.initial_state.code = (PyCodeObject *)Py_NewRef(code); - _tstate->jit_state.initial_state.func = (PyFunctionObject *)Py_XNewRef(PyStackRef_AsPyObjectBorrow(frame->f_funcobj)); + _tstate->jit_state.initial_state.func = (PyFunctionObject *)Py_NewRef(func); _tstate->jit_state.initial_state.exit = exit; _tstate->jit_state.initial_state.stack_depth = curr_stackdepth; _tstate->jit_state.initial_state.chain_depth = chain_depth; _tstate->jit_state.prev_state.instr_frame = frame; _tstate->jit_state.prev_state.dependencies_still_valid = true; - _tstate->jit_state.prev_state.specialize_counter = 0; _tstate->jit_state.prev_state.instr_code = (PyCodeObject *)Py_NewRef(_PyFrame_GetCode(frame)); _tstate->jit_state.prev_state.instr = curr_instr; _tstate->jit_state.prev_state.instr_frame = frame; @@ -1014,7 +1014,10 @@ _PyJit_TryInitializeTracing( _tstate->jit_state.prev_state.instr_is_super = false; assert(curr_instr->op.code == JUMP_BACKWARD_JIT || (exit != NULL)); _tstate->jit_state.initial_state.jump_backward_instr = curr_instr; - assert(curr_instr->op.code == JUMP_BACKWARD_JIT || (exit != NULL)); + + if (_PyOpcode_Caches[_PyOpcode_Deopt[close_loop_instr->op.code]]) { + close_loop_instr[1].counter = trigger_backoff_counter(); + } _Py_BloomFilter_Init(&_tstate->jit_state.prev_state.dependencies); return 1; } @@ -1366,9 +1369,10 @@ uop_optimize( assert(length < UOP_MAX_TRACE_LENGTH); OPT_STAT_INC(traces_created); if (!is_noopt) { - length = _Py_uop_analyze_and_optimize(_tstate->jit_state.initial_state.func, buffer, - length, - curr_stackentries, &new_dependencies); + length = _Py_uop_analyze_and_optimize( + _tstate->jit_state.initial_state.func, + buffer, length, + curr_stackentries, &new_dependencies); if (length <= 0) { return length; } diff --git a/Python/optimizer_analysis.c b/Python/optimizer_analysis.c index a1f8f40ed763..ded542ecc42c 100644 --- a/Python/optimizer_analysis.c +++ b/Python/optimizer_analysis.c @@ -360,9 +360,10 @@ optimize_uops( } if (ctx->contradiction) { // Attempted to push a "bottom" (contradiction) symbol onto the stack. - // This means that the abstract interpreter has hit unreachable code. + // This means that the abstract interpreter has optimized the trace + // to an unreachable state. // We *could* generate an _EXIT_TRACE or _FATAL_ERROR here, but hitting - // bottom indicates type instability, so we are probably better off + // bottom usually indicates an optimizer bug, so we are probably better off // retrying later. 
DPRINTF(3, "\n"); DPRINTF(1, "Hit bottom in abstract interpreter\n"); diff --git a/Python/optimizer_bytecodes.c b/Python/optimizer_bytecodes.c index eca5c0b69696..b12ee02557d9 100644 --- a/Python/optimizer_bytecodes.c +++ b/Python/optimizer_bytecodes.c @@ -342,7 +342,6 @@ dummy_func(void) { int already_bool = optimize_to_bool(this_instr, ctx, value, &value); if (!already_bool) { sym_set_type(value, &PyBool_Type); - value = sym_new_truthiness(ctx, value, true); } } diff --git a/Python/optimizer_cases.c.h b/Python/optimizer_cases.c.h index 765303cdeb99..ce5332814ced 100644 --- a/Python/optimizer_cases.c.h +++ b/Python/optimizer_cases.c.h @@ -280,7 +280,6 @@ int already_bool = optimize_to_bool(this_instr, ctx, value, &value); if (!already_bool) { sym_set_type(value, &PyBool_Type); - value = sym_new_truthiness(ctx, value, true); } stack_pointer[-1] = value; break; diff --git a/Python/specialize.c b/Python/specialize.c index a1c5dedd6156..5b44432b8b72 100644 --- a/Python/specialize.c +++ b/Python/specialize.c @@ -2041,8 +2041,12 @@ specialize_method_descriptor(PyMethodDescrObject *descr, _Py_CODEUNIT *instr, bool pop = (next.op.code == POP_TOP); int oparg = instr->op.arg; if ((PyObject *)descr == list_append && oparg == 1 && pop) { - specialize(instr, CALL_LIST_APPEND); - return 0; + PyThreadState *tstate = PyThreadState_GET(); + PyObject *self = PyStackRef_AsPyObjectBorrow(tstate->current_frame->stackpointer[-2]); + if (PyList_CheckExact(self)) { + specialize(instr, CALL_LIST_APPEND); + return 0; + } } specialize(instr, CALL_METHOD_DESCRIPTOR_O); return 0; diff --git a/Tools/cases_generator/analyzer.py b/Tools/cases_generator/analyzer.py index 532d5df14f41..d39013db4f7f 100644 --- a/Tools/cases_generator/analyzer.py +++ b/Tools/cases_generator/analyzer.py @@ -702,6 +702,7 @@ NON_ESCAPING_FUNCTIONS = ( "_PyJit_TryInitializeTracing", "_Py_unset_eval_breaker_bit", "_Py_set_eval_breaker_bit", + "trigger_backoff_counter", ) diff --git a/Tools/cases_generator/generators_common.py b/Tools/cases_generator/generators_common.py index 7e4f1bd5c88c..0b5f764ec52b 100644 --- a/Tools/cases_generator/generators_common.py +++ b/Tools/cases_generator/generators_common.py @@ -129,7 +129,6 @@ class Emitter: "DISPATCH": self.dispatch, "INSTRUCTION_SIZE": self.instruction_size, "stack_pointer": self.stack_pointer, - "DISPATCH_SAME_OPARG": self.dispatch_same_oparg, } self.out = out self.labels = labels @@ -150,26 +149,6 @@ class Emitter: self.emit(tkn) return False - def dispatch_same_oparg( - self, - tkn: Token, - tkn_iter: TokenIterator, - uop: CodeSection, - storage: Storage, - inst: Instruction | None, - ) -> bool: - assert isinstance(uop, Uop) - assert "specializing" in uop.annotations, uop.name - self.out.start_line() - self.emit("#if _Py_TIER2\n") - self.emit("((_PyThreadStateImpl *)tstate)->jit_state.prev_state.specialize_counter++;\n") - self.emit("#endif\n") - self.emit(tkn) - emit_to(self.out, tkn_iter, "SEMI") - self.emit(";\n") - self.out.start_line() - return False - def deopt_if( self, tkn: Token,
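
Editor's note (not part of the patch): the counter trick that replaces specialize_counter is compact enough that a standalone model helps. The sketch below mirrors the semantics visible in pycore_backoff.h -- a 16-bit counter packing a countdown value above a small backoff exponent, with an "unreachable" encoding that can never fire. The packing widths, the constant values, and the BackoffCounter type here are illustrative assumptions, not the real CPython definitions; only the trigger predicate and the all-zero trigger state are taken directly from the diff.

#include <assert.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Assumed layout, modeled on pycore_backoff.h: the low 4 bits hold a
   backoff exponent, the upper 12 bits hold the countdown value. */
#define BACKOFF_BITS 4
#define UNREACHABLE_BACKOFF 15

typedef struct { uint16_t value_and_backoff; } BackoffCounter;

static BackoffCounter
make_backoff_counter(uint16_t value, uint16_t backoff)
{
    return (BackoffCounter){ (uint16_t)((value << BACKOFF_BITS) | backoff) };
}

/* Same predicate as backoff_counter_triggers() in the patch: only a
   packed value below UNREACHABLE_BACKOFF can ever fire. */
static bool
backoff_counter_triggers(BackoffCounter c)
{
    return c.value_and_backoff < UNREACHABLE_BACKOFF;
}

/* The helper this commit adds: an all-zero counter, which trivially
   satisfies the trigger predicate on the next adaptive check. */
static BackoffCounter
trigger_backoff_counter(void)
{
    return (BackoffCounter){ 0 };
}

int main(void)
{
    /* A specialized instruction sitting in its cooldown does not trigger. */
    BackoffCounter cache = make_backoff_counter(52, 0);
    assert(!backoff_counter_triggers(cache));

    /* Stamping the trigger state into the inline cache -- what the JIT
       label and _PyJit_TryInitializeTracing now do for instructions
       that have caches -- forces re-specialization on the next run. */
    cache = trigger_backoff_counter();
    assert(backoff_counter_triggers(cache));

    /* The "unreachable" counter, by contrast, can never fire. */
    BackoffCounter never = make_backoff_counter(0, UNREACHABLE_BACKOFF);
    assert(!backoff_counter_triggers(never));

    puts("backoff counter model: OK");
    return 0;
}

This is why the per-thread specialize_counter, MAX_SPECIALIZATION_TRIES, and the IS_JIT_TRACING_MAKING_PROGRESS() override of ADAPTIVE_COUNTER_TRIGGERS() all become unnecessary: the "force re-specialization while tracing" decision now lives in the instruction's own inline cache entry rather than in thread state, so it expires naturally once the instruction re-specializes.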