From: Ken Jin <28750310+Fidget-Spinner@users.noreply.github.com> Date: Fri, 7 Nov 2025 20:58:37 +0000 (+0000) Subject: massive refactoring 2 X-Git-Url: http://git.ipfire.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=fe3a6a1743711c17bc2370370f5bfe14724f0f61;p=thirdparty%2FPython%2Fcpython.git massive refactoring 2 --- diff --git a/Include/internal/pycore_interp_structs.h b/Include/internal/pycore_interp_structs.h index cac5e358a673..704a08424993 100644 --- a/Include/internal/pycore_interp_structs.h +++ b/Include/internal/pycore_interp_structs.h @@ -768,18 +768,20 @@ typedef struct _PyJitTracerState { _Py_CODEUNIT *close_loop_instr; _Py_CODEUNIT *jump_backward_instr; } initial_state; - bool dependencies_still_valid; - bool prev_instr_is_super; - int code_max_size; - int code_curr_size; - int prev_instr_oparg; - int prev_instr_stacklevel; - int specialize_counter; _PyUOpInstruction *code_buffer; - _Py_CODEUNIT *prev_instr; - PyCodeObject *prev_instr_code; // Strong - _PyInterpreterFrame *prev_instr_frame; - _PyBloomFilter dependencies; + struct { + bool dependencies_still_valid; + bool instr_is_super; + int code_max_size; + int code_curr_size; + int instr_oparg; + int instr_stacklevel; + int specialize_counter; + _Py_CODEUNIT *instr; + PyCodeObject *instr_code; // Strong + _PyInterpreterFrame *instr_frame; + _PyBloomFilter dependencies; + } prev_state; } _PyJitTracerState; /* PyInterpreterState holds the global state for one of the runtime's diff --git a/Python/bytecodes.c b/Python/bytecodes.c index ac308c918d0f..42fb4170eae7 100644 --- a/Python/bytecodes.c +++ b/Python/bytecodes.c @@ -5673,24 +5673,24 @@ dummy_func( } // Super instructions. Instruction deopted. There's a mismatch in what the stack expects // in the optimizer. So we have to reflect in the trace correctly. 
- if ((tstate->interp->jit_state.prev_instr->op.code == CALL_LIST_APPEND && + if ((tstate->interp->jit_state.prev_state.instr->op.code == CALL_LIST_APPEND && opcode == POP_TOP) || - (tstate->interp->jit_state.prev_instr->op.code == BINARY_OP_INPLACE_ADD_UNICODE && + (tstate->interp->jit_state.prev_state.instr->op.code == BINARY_OP_INPLACE_ADD_UNICODE && opcode == STORE_FAST)) { - tstate->interp->jit_state.prev_instr_is_super = true; + tstate->interp->jit_state.prev_state.instr_is_super = true; } else { - tstate->interp->jit_state.prev_instr = next_instr; + tstate->interp->jit_state.prev_state.instr = next_instr; } - tstate->interp->jit_state.specialize_counter = 0; + tstate->interp->jit_state.prev_state.specialize_counter = 0; PyCodeObject *prev_code = (PyCodeObject *)Py_NewRef(PyStackRef_AsPyObjectBorrow(frame->f_executable)); - if (tstate->interp->jit_state.prev_instr_code != prev_code) { - Py_SETREF(tstate->interp->jit_state.prev_instr_code, prev_code); + if (tstate->interp->jit_state.prev_state.instr_code != prev_code) { + Py_SETREF(tstate->interp->jit_state.prev_state.instr_code, prev_code); } - tstate->interp->jit_state.prev_instr_frame = frame; - tstate->interp->jit_state.prev_instr_oparg = oparg; - tstate->interp->jit_state.prev_instr_stacklevel = PyStackRef_IsNone(frame->f_executable) ? 2 : STACK_LEVEL(); + tstate->interp->jit_state.prev_state.instr_frame = frame; + tstate->interp->jit_state.prev_state.instr_oparg = oparg; + tstate->interp->jit_state.prev_state.instr_stacklevel = PyStackRef_IsNone(frame->f_executable) ? 2 : STACK_LEVEL(); DISPATCH_GOTO_NON_TRACING(); #else Py_FatalError("JIT label executed in non-jit build."); diff --git a/Python/ceval.c b/Python/ceval.c index 1b4ce9233c19..ce6493fdd852 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -1017,7 +1017,7 @@ bail_tracing_and_jit(PyThreadState *tstate, _PyInterpreterFrame *frame) // to be valid to access. if (err <= 0) { // Some opcodes will forever be unchanged. 
Don't ever bother specializing for them ever again. - if (tstate->interp->jit_state.prev_instr->op.code == INTERPRETER_EXIT) { + if (tstate->interp->jit_state.prev_state.instr->op.code == INTERPRETER_EXIT) { exit->temperature = initial_unreachable_backoff_counter(); } else { diff --git a/Python/ceval_macros.h b/Python/ceval_macros.h index a8290242a5c1..219874ac240b 100644 --- a/Python/ceval_macros.h +++ b/Python/ceval_macros.h @@ -134,7 +134,7 @@ #if (_Py_TAIL_CALL_INTERP || USE_COMPUTED_GOTOS) && _Py_TIER2 # define IS_JIT_TRACING() (DISPATCH_TABLE_VAR == TRACING_DISPATCH_TABLE) -# define IS_JIT_TRACING_MAKING_PROGRESS() (IS_JIT_TRACING() && tstate->interp->jit_state.specialize_counter < MAX_SPECIALIZATION_TRIES) +# define IS_JIT_TRACING_MAKING_PROGRESS() (IS_JIT_TRACING() && tstate->interp->jit_state.prev_state.specialize_counter < MAX_SPECIALIZATION_TRIES) # define ENTER_TRACING() \ DISPATCH_TABLE_VAR = TRACING_DISPATCH_TABLE; # define LEAVE_TRACING() \ @@ -402,7 +402,7 @@ do { \ JUMP_TO_LABEL(error); \ } \ if (keep_tracing_bit) { \ - assert(tstate->interp->jit_state.code_curr_size == 2 || tstate->interp->jit_state.code_curr_size == 3); \ + assert(tstate->interp->jit_state.prev_state.code_curr_size == 2); \ ENTER_TRACING(); \ DISPATCH_NON_TRACING(); \ } \ diff --git a/Python/generated_cases.c.h b/Python/generated_cases.c.h index 33ac4152bd5e..24c4271c88fa 100644 --- a/Python/generated_cases.c.h +++ b/Python/generated_cases.c.h @@ -45,7 +45,7 @@ _Py_Specialize_BinaryOp(lhs, rhs, next_instr, oparg, LOCALS_ARRAY); stack_pointer = _PyFrame_GetStackPointer(frame); #if _Py_TIER2 - tstate->interp->jit_state.specialize_counter++; + tstate->interp->jit_state.prev_state.specialize_counter++; #endif DISPATCH_SAME_OPARG(); } @@ -1539,7 +1539,7 @@ _Py_Specialize_Call(callable, next_instr, oparg + !PyStackRef_IsNull(self_or_null)); stack_pointer = _PyFrame_GetStackPointer(frame); #if _Py_TIER2 - tstate->interp->jit_state.specialize_counter++; + 
tstate->interp->jit_state.prev_state.specialize_counter++; #endif DISPATCH_SAME_OPARG(); } @@ -2832,7 +2832,7 @@ _Py_Specialize_CallKw(callable, next_instr, oparg + !PyStackRef_IsNull(self_or_null)); stack_pointer = _PyFrame_GetStackPointer(frame); #if _Py_TIER2 - tstate->interp->jit_state.specialize_counter++; + tstate->interp->jit_state.prev_state.specialize_counter++; #endif DISPATCH_SAME_OPARG(); } @@ -4678,7 +4678,7 @@ _Py_Specialize_CompareOp(left, right, next_instr, oparg); stack_pointer = _PyFrame_GetStackPointer(frame); #if _Py_TIER2 - tstate->interp->jit_state.specialize_counter++; + tstate->interp->jit_state.prev_state.specialize_counter++; #endif DISPATCH_SAME_OPARG(); } @@ -4926,7 +4926,7 @@ _Py_Specialize_ContainsOp(right, next_instr); stack_pointer = _PyFrame_GetStackPointer(frame); #if _Py_TIER2 - tstate->interp->jit_state.specialize_counter++; + tstate->interp->jit_state.prev_state.specialize_counter++; #endif DISPATCH_SAME_OPARG(); } @@ -5665,7 +5665,7 @@ _Py_Specialize_ForIter(iter, null_or_index, next_instr, oparg); stack_pointer = _PyFrame_GetStackPointer(frame); #if _Py_TIER2 - tstate->interp->jit_state.specialize_counter++; + tstate->interp->jit_state.prev_state.specialize_counter++; #endif DISPATCH_SAME_OPARG(); } @@ -7647,7 +7647,7 @@ FT_ATOMIC_STORE_UINT8_RELAXED(this_instr->op.code, desired); next_instr = this_instr; #if _Py_TIER2 - tstate->interp->jit_state.specialize_counter++; + tstate->interp->jit_state.prev_state.specialize_counter++; #endif DISPATCH_SAME_OPARG(); } @@ -7880,7 +7880,7 @@ _Py_Specialize_LoadAttr(owner, next_instr, name); stack_pointer = _PyFrame_GetStackPointer(frame); #if _Py_TIER2 - tstate->interp->jit_state.specialize_counter++; + tstate->interp->jit_state.prev_state.specialize_counter++; #endif DISPATCH_SAME_OPARG(); } @@ -9188,7 +9188,7 @@ _Py_Specialize_LoadGlobal(GLOBALS(), BUILTINS(), next_instr, name); stack_pointer = _PyFrame_GetStackPointer(frame); #if _Py_TIER2 - 
tstate->interp->jit_state.specialize_counter++; + tstate->interp->jit_state.prev_state.specialize_counter++; #endif DISPATCH_SAME_OPARG(); } @@ -9511,7 +9511,7 @@ _Py_Specialize_LoadSuperAttr(global_super_st, class_st, next_instr, load_method); stack_pointer = _PyFrame_GetStackPointer(frame); #if _Py_TIER2 - tstate->interp->jit_state.specialize_counter++; + tstate->interp->jit_state.prev_state.specialize_counter++; #endif DISPATCH_SAME_OPARG(); } @@ -10499,7 +10499,7 @@ _Py_Specialize_Send(receiver, next_instr); stack_pointer = _PyFrame_GetStackPointer(frame); #if _Py_TIER2 - tstate->interp->jit_state.specialize_counter++; + tstate->interp->jit_state.prev_state.specialize_counter++; #endif DISPATCH_SAME_OPARG(); } @@ -10801,7 +10801,7 @@ _Py_Specialize_StoreAttr(owner, next_instr, name); stack_pointer = _PyFrame_GetStackPointer(frame); #if _Py_TIER2 - tstate->interp->jit_state.specialize_counter++; + tstate->interp->jit_state.prev_state.specialize_counter++; #endif DISPATCH_SAME_OPARG(); } @@ -11301,7 +11301,7 @@ _Py_Specialize_StoreSubscr(container, sub, next_instr); stack_pointer = _PyFrame_GetStackPointer(frame); #if _Py_TIER2 - tstate->interp->jit_state.specialize_counter++; + tstate->interp->jit_state.prev_state.specialize_counter++; #endif DISPATCH_SAME_OPARG(); } @@ -11513,7 +11513,7 @@ _Py_Specialize_ToBool(value, next_instr); stack_pointer = _PyFrame_GetStackPointer(frame); #if _Py_TIER2 - tstate->interp->jit_state.specialize_counter++; + tstate->interp->jit_state.prev_state.specialize_counter++; #endif DISPATCH_SAME_OPARG(); } @@ -11896,7 +11896,7 @@ _Py_Specialize_UnpackSequence(seq, next_instr, oparg); stack_pointer = _PyFrame_GetStackPointer(frame); #if _Py_TIER2 - tstate->interp->jit_state.specialize_counter++; + tstate->interp->jit_state.prev_state.specialize_counter++; #endif DISPATCH_SAME_OPARG(); } @@ -12360,25 +12360,25 @@ JUMP_TO_LABEL(error); } DISPATCH_GOTO_NON_TRACING(); } - if ((tstate->interp->jit_state.prev_instr->op.code == 
CALL_LIST_APPEND && + if ((tstate->interp->jit_state.prev_state.instr->op.code == CALL_LIST_APPEND && opcode == POP_TOP) || - (tstate->interp->jit_state.prev_instr->op.code == BINARY_OP_INPLACE_ADD_UNICODE && + (tstate->interp->jit_state.prev_state.instr->op.code == BINARY_OP_INPLACE_ADD_UNICODE && opcode == STORE_FAST)) { - tstate->interp->jit_state.prev_instr_is_super = true; + tstate->interp->jit_state.prev_state.instr_is_super = true; } else { - tstate->interp->jit_state.prev_instr = next_instr; + tstate->interp->jit_state.prev_state.instr = next_instr; } - tstate->interp->jit_state.specialize_counter = 0; + tstate->interp->jit_state.prev_state.specialize_counter = 0; PyCodeObject *prev_code = (PyCodeObject *)Py_NewRef(PyStackRef_AsPyObjectBorrow(frame->f_executable)); - if (tstate->interp->jit_state.prev_instr_code != prev_code) { + if (tstate->interp->jit_state.prev_state.instr_code != prev_code) { _PyFrame_SetStackPointer(frame, stack_pointer); - Py_SETREF(tstate->interp->jit_state.prev_instr_code, prev_code); + Py_SETREF(tstate->interp->jit_state.prev_state.instr_code, prev_code); stack_pointer = _PyFrame_GetStackPointer(frame); } - tstate->interp->jit_state.prev_instr_frame = frame; - tstate->interp->jit_state.prev_instr_oparg = oparg; - tstate->interp->jit_state.prev_instr_stacklevel = PyStackRef_IsNone(frame->f_executable) ? 2 : STACK_LEVEL(); + tstate->interp->jit_state.prev_state.instr_frame = frame; + tstate->interp->jit_state.prev_state.instr_oparg = oparg; + tstate->interp->jit_state.prev_state.instr_stacklevel = PyStackRef_IsNone(frame->f_executable) ? 2 : STACK_LEVEL(); DISPATCH_GOTO_NON_TRACING(); #else Py_FatalError("JIT label executed in non-jit build."); diff --git a/Python/optimizer.c b/Python/optimizer.c index 90bf7069d8fe..1e858ca3689a 100644 --- a/Python/optimizer.c +++ b/Python/optimizer.c @@ -142,7 +142,7 @@ _PyOptimizer_Optimize( return 0; } // One of our dependencies while tracing was invalidated. Not worth compiling. 
- if (!tstate->interp->jit_state.dependencies_still_valid) { + if (!tstate->interp->jit_state.prev_state.dependencies_still_valid) { interp->compiling = false; return 0; } @@ -562,7 +562,7 @@ _PyJit_translate_single_bytecode_to_trace( } #endif - PyCodeObject *old_code = tstate->interp->jit_state.prev_instr_code; + PyCodeObject *old_code = tstate->interp->jit_state.prev_state.instr_code; // Something else finalized the trace. This can happen in multi-threaded scenarios as our trace // addition from bytecode execution to here is not atomic. // Though in GIL builds, the GIL protects the rest. @@ -570,18 +570,18 @@ _PyJit_translate_single_bytecode_to_trace( return 0; } bool progress_needed = (tstate->interp->jit_state.initial_state.chain_depth % MAX_CHAIN_DEPTH) == 0; - _PyBloomFilter *dependencies = &tstate->interp->jit_state.dependencies; + _PyBloomFilter *dependencies = &tstate->interp->jit_state.prev_state.dependencies; _Py_BloomFilter_Add(dependencies, old_code); - int trace_length = tstate->interp->jit_state.code_curr_size; + int trace_length = tstate->interp->jit_state.prev_state.code_curr_size; _PyUOpInstruction *trace = tstate->interp->jit_state.code_buffer; - int max_length = tstate->interp->jit_state.code_max_size; + int max_length = tstate->interp->jit_state.prev_state.code_max_size; int is_sys_tracing = (tstate->c_tracefunc != NULL) || (tstate->c_profilefunc != NULL); if (is_sys_tracing) { goto full; } - _Py_CODEUNIT *this_instr = tstate->interp->jit_state.prev_instr; + _Py_CODEUNIT *this_instr = tstate->interp->jit_state.prev_state.instr; _Py_CODEUNIT *target_instr = this_instr; uint32_t target = 0; @@ -591,10 +591,10 @@ _PyJit_translate_single_bytecode_to_trace( // Rewind EXTENDED_ARG so that we see the whole thing. // We must point to the first EXTENDED_ARG when deopting. 
- int oparg = tstate->interp->jit_state.prev_instr_oparg; + int oparg = tstate->interp->jit_state.prev_state.instr_oparg; int opcode = this_instr->op.code; // Failed specialization many times. Deopt! - if (tstate->interp->jit_state.specialize_counter >= MAX_SPECIALIZATION_TRIES) { + if (tstate->interp->jit_state.prev_state.specialize_counter >= MAX_SPECIALIZATION_TRIES) { opcode = _PyOpcode_Deopt[opcode]; } int rewind_oparg = oparg; @@ -603,7 +603,7 @@ _PyJit_translate_single_bytecode_to_trace( target--; } - int old_stack_level = tstate->interp->jit_state.prev_instr_stacklevel; + int old_stack_level = tstate->interp->jit_state.prev_state.instr_stacklevel; // Strange control-flow bool has_dynamic_jump_taken = OPCODE_HAS_UNPREDICTABLE_JUMP(opcode) && @@ -611,7 +611,7 @@ _PyJit_translate_single_bytecode_to_trace( /* Special case the first instruction, * so that we can guarantee forward progress */ - if (progress_needed && tstate->interp->jit_state.code_curr_size <= 3) { + if (progress_needed && tstate->interp->jit_state.prev_state.code_curr_size <= 3) { if (OPCODE_HAS_EXIT(opcode) || OPCODE_HAS_DEOPT(opcode)) { opcode = _PyOpcode_Deopt[opcode]; } @@ -633,8 +633,8 @@ _PyJit_translate_single_bytecode_to_trace( #endif // Skip over super instructions. - if (tstate->interp->jit_state.prev_instr_is_super) { - tstate->interp->jit_state.prev_instr_is_super = false; + if (tstate->interp->jit_state.prev_state.instr_is_super) { + tstate->interp->jit_state.prev_state.instr_is_super = false; return 1; } @@ -642,13 +642,13 @@ _PyJit_translate_single_bytecode_to_trace( goto full; } - if (!tstate->interp->jit_state.dependencies_still_valid) { + if (!tstate->interp->jit_state.prev_state.dependencies_still_valid) { goto done; } // This happens when a recursive call happens that we can't trace. Such as Python -> C -> Python calls // If we haven't guarded the IP, then it's untraceable. 
- if (frame != tstate->interp->jit_state.prev_instr_frame && !needs_guard_ip) { + if (frame != tstate->interp->jit_state.prev_state.instr_frame && !needs_guard_ip) { DPRINTF(2, "Unsupported: unguardable jump taken\n"); goto unsupported; } @@ -750,7 +750,7 @@ _PyJit_translate_single_bytecode_to_trace( { if ((next_instr != tstate->interp->jit_state.initial_state.close_loop_instr) && (next_instr != tstate->interp->jit_state.initial_state.start_instr) && - tstate->interp->jit_state.code_curr_size > 5 && + tstate->interp->jit_state.prev_state.code_curr_size > 5 && // These are coroutines, and we want to unroll those usually. opcode != JUMP_BACKWARD_NO_INTERRUPT) { // We encountered a JUMP_BACKWARD but not to the top of our own loop. @@ -918,7 +918,7 @@ _PyJit_translate_single_bytecode_to_trace( // Loop back to the start int is_first_instr = tstate->interp->jit_state.initial_state.close_loop_instr == next_instr || tstate->interp->jit_state.initial_state.start_instr == next_instr; - if (is_first_instr && tstate->interp->jit_state.code_curr_size > 5) { + if (is_first_instr && tstate->interp->jit_state.prev_state.code_curr_size > 5) { if (needs_guard_ip) { ADD_TO_TRACE(_SET_IP, 0, (uintptr_t)next_instr, 0); } @@ -926,27 +926,27 @@ _PyJit_translate_single_bytecode_to_trace( goto done; } DPRINTF(2, "Trace continuing\n"); - tstate->interp->jit_state.code_curr_size = trace_length; - tstate->interp->jit_state.code_max_size = max_length; + tstate->interp->jit_state.prev_state.code_curr_size = trace_length; + tstate->interp->jit_state.prev_state.code_max_size = max_length; return 1; done: DPRINTF(2, "Trace done\n"); - tstate->interp->jit_state.code_curr_size = trace_length; - tstate->interp->jit_state.code_max_size = max_length; + tstate->interp->jit_state.prev_state.code_curr_size = trace_length; + tstate->interp->jit_state.prev_state.code_max_size = max_length; return 0; full: DPRINTF(2, "Trace full\n"); if (!is_terminator(&tstate->interp->jit_state.code_buffer[trace_length-1])) 
{ // Undo the last few instructions. - trace_length = tstate->interp->jit_state.code_curr_size; - max_length = tstate->interp->jit_state.code_max_size; + trace_length = tstate->interp->jit_state.prev_state.code_curr_size; + max_length = tstate->interp->jit_state.prev_state.code_max_size; // We previously reversed one. max_length += 1; ADD_TO_TRACE(_EXIT_TRACE, 0, 0, target); trace[trace_length-1].operand1 = true; // is_control_flow } - tstate->interp->jit_state.code_curr_size = trace_length; - tstate->interp->jit_state.code_max_size = max_length; + tstate->interp->jit_state.prev_state.code_curr_size = trace_length; + tstate->interp->jit_state.prev_state.code_max_size = max_length; return 0; } @@ -959,7 +959,7 @@ _PyJit_TryInitializeTracing( { // A recursive trace. // Don't trace into the inner call because it will stomp on the previous trace, causing endless retraces. - if (tstate->interp->jit_state.code_curr_size > 2) { + if (tstate->interp->jit_state.prev_state.code_curr_size > 2) { return 0; } if (oparg > 0xFFFF) { @@ -984,9 +984,9 @@ _PyJit_TryInitializeTracing( add_to_trace(tstate->interp->jit_state.code_buffer, 0, _START_EXECUTOR, 0, (uintptr_t)start_instr, INSTR_IP(start_instr, code)); add_to_trace(tstate->interp->jit_state.code_buffer, 1, _MAKE_WARM, 0, 0, 0); - tstate->interp->jit_state.code_curr_size = 2; + tstate->interp->jit_state.prev_state.code_curr_size = 2; - tstate->interp->jit_state.code_max_size = UOP_MAX_TRACE_LENGTH; + tstate->interp->jit_state.prev_state.code_max_size = UOP_MAX_TRACE_LENGTH; tstate->interp->jit_state.initial_state.start_instr = start_instr; tstate->interp->jit_state.initial_state.close_loop_instr = close_loop_instr; tstate->interp->jit_state.initial_state.code = (PyCodeObject *)Py_NewRef(code); @@ -994,19 +994,19 @@ _PyJit_TryInitializeTracing( tstate->interp->jit_state.initial_state.exit = exit; tstate->interp->jit_state.initial_state.stack_depth = curr_stackdepth; tstate->interp->jit_state.initial_state.chain_depth = 
chain_depth; - tstate->interp->jit_state.prev_instr_frame = frame; - tstate->interp->jit_state.dependencies_still_valid = true; - tstate->interp->jit_state.specialize_counter = 0; - tstate->interp->jit_state.prev_instr_code = (PyCodeObject *)Py_NewRef(_PyFrame_GetCode(frame)); - tstate->interp->jit_state.prev_instr = curr_instr; - tstate->interp->jit_state.prev_instr_frame = frame; - tstate->interp->jit_state.prev_instr_oparg = oparg; - tstate->interp->jit_state.prev_instr_stacklevel = curr_stackdepth; - tstate->interp->jit_state.prev_instr_is_super = false; + tstate->interp->jit_state.prev_state.instr_frame = frame; + tstate->interp->jit_state.prev_state.dependencies_still_valid = true; + tstate->interp->jit_state.prev_state.specialize_counter = 0; + tstate->interp->jit_state.prev_state.instr_code = (PyCodeObject *)Py_NewRef(_PyFrame_GetCode(frame)); + tstate->interp->jit_state.prev_state.instr = curr_instr; + tstate->interp->jit_state.prev_state.instr_frame = frame; + tstate->interp->jit_state.prev_state.instr_oparg = oparg; + tstate->interp->jit_state.prev_state.instr_stacklevel = curr_stackdepth; + tstate->interp->jit_state.prev_state.instr_is_super = false; assert(curr_instr->op.code == JUMP_BACKWARD_JIT || (exit != NULL)); tstate->interp->jit_state.initial_state.jump_backward_instr = curr_instr; assert(curr_instr->op.code == JUMP_BACKWARD_JIT || (exit != NULL)); - _Py_BloomFilter_Init(&tstate->interp->jit_state.dependencies); + _Py_BloomFilter_Init(&tstate->interp->jit_state.prev_state.dependencies); return 1; } @@ -1015,9 +1015,9 @@ _PyJit_FinalizeTracing(PyThreadState *tstate) { Py_CLEAR(tstate->interp->jit_state.initial_state.code); Py_CLEAR(tstate->interp->jit_state.initial_state.func); - Py_CLEAR(tstate->interp->jit_state.prev_instr_code); - tstate->interp->jit_state.code_curr_size = 2; - tstate->interp->jit_state.code_max_size = UOP_MAX_TRACE_LENGTH - 1; + Py_CLEAR(tstate->interp->jit_state.prev_state.instr_code); + 
tstate->interp->jit_state.prev_state.code_curr_size = 2; + tstate->interp->jit_state.prev_state.code_max_size = UOP_MAX_TRACE_LENGTH - 1; } @@ -1347,7 +1347,7 @@ uop_optimize( is_noopt = false; } int curr_stackentries = tstate->interp->jit_state.initial_state.stack_depth; - int length = interp->jit_state.code_curr_size; + int length = interp->jit_state.prev_state.code_curr_size; // Trace too short, don't bother. if (length <= 5) { return 0; @@ -1725,9 +1725,9 @@ _PyJit_Tracer_InvalidateDependency(PyThreadState *tstate, void *obj) _Py_BloomFilter_Init(&obj_filter); _Py_BloomFilter_Add(&obj_filter, obj); - if (bloom_filter_may_contain(&tstate->interp->jit_state.dependencies, &obj_filter)) + if (bloom_filter_may_contain(&tstate->interp->jit_state.prev_state.dependencies, &obj_filter)) { - tstate->interp->jit_state.dependencies_still_valid = false; + tstate->interp->jit_state.prev_state.dependencies_still_valid = false; } } /* Invalidate all executors */ diff --git a/Tools/cases_generator/generators_common.py b/Tools/cases_generator/generators_common.py index 67eba7dec09c..4ede31992a7d 100644 --- a/Tools/cases_generator/generators_common.py +++ b/Tools/cases_generator/generators_common.py @@ -162,7 +162,7 @@ class Emitter: assert "specializing" in uop.annotations, uop.name self.out.start_line() self.emit("#if _Py_TIER2\n") - self.emit("tstate->interp->jit_state.specialize_counter++;\n") + self.emit("tstate->interp->jit_state.prev_state.specialize_counter++;\n") self.emit("#endif\n") self.emit(tkn) emit_to(self.out, tkn_iter, "SEMI")