From: Ken Jin <28750310+Fidget-Spinner@users.noreply.github.com>
Date: Tue, 21 Oct 2025 21:36:15 +0000 (+0100)
Subject: Clean up the cases generator
X-Git-Url: http://git.ipfire.org/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=e4f162432afd966090f9e2cdf7de99b4614121ba;p=thirdparty%2FPython%2Fcpython.git

Clean up the cases generator
---

diff --git a/Python/generated_tracer_cases.c.h b/Python/generated_tracer_cases.c.h
index 6317b02ddb93..932a57a22e51 100644
--- a/Python/generated_tracer_cases.c.h
+++ b/Python/generated_tracer_cases.c.h
@@ -1797,7 +1797,6 @@
             (void)_jump_taken;
             int _old_stack_level = !PyStackRef_IsNull(frame->f_executable) ? STACK_LEVEL() : 0;
             (void)(_old_stack_level);
-            opcode = CALL;
             _PyStackRef callable;
             _PyStackRef self_or_null;
             _PyStackRef *args;
@@ -2853,7 +2852,6 @@
             (void)_jump_taken;
             int _old_stack_level = !PyStackRef_IsNull(frame->f_executable) ? STACK_LEVEL() : 0;
             (void)(_old_stack_level);
-            opcode = CALL_FUNCTION_EX;
             _PyStackRef func;
             _PyStackRef callargs;
             _PyStackRef func_st;
@@ -3210,7 +3208,6 @@
             (void)_jump_taken;
             int _old_stack_level = !PyStackRef_IsNull(frame->f_executable) ? STACK_LEVEL() : 0;
             (void)(_old_stack_level);
-            opcode = CALL_KW;
             _PyStackRef callable;
             _PyStackRef self_or_null;
             _PyStackRef *args;
@@ -3541,7 +3538,6 @@
             (void)_jump_taken;
             int _old_stack_level = !PyStackRef_IsNull(frame->f_executable) ? STACK_LEVEL() : 0;
             (void)(_old_stack_level);
-            opcode = CALL_KW_NON_PY;
             static_assert(INLINE_CACHE_ENTRIES_CALL_KW == 3, "incorrect cache size");
             _PyStackRef callable;
             _PyStackRef self_or_null;
@@ -4461,7 +4457,6 @@
             (void)_jump_taken;
             int _old_stack_level = !PyStackRef_IsNull(frame->f_executable) ? STACK_LEVEL() : 0;
             (void)(_old_stack_level);
-            opcode = CALL_NON_PY_GENERAL;
             static_assert(INLINE_CACHE_ENTRIES_CALL == 3, "incorrect cache size");
             _PyStackRef callable;
             _PyStackRef self_or_null;
@@ -6307,7 +6302,6 @@
             (void)_jump_taken;
             int _old_stack_level = !PyStackRef_IsNull(frame->f_executable) ? STACK_LEVEL() : 0;
             (void)(_old_stack_level);
-            opcode = ENTER_EXECUTOR;
             #ifdef _Py_TIER2
             PyCodeObject *code = _PyFrame_GetCode(frame);
             _PyExecutorObject *executor = code->co_executors->executors[oparg & 255];
@@ -6391,7 +6385,6 @@
             (void)_jump_taken;
             int _old_stack_level = !PyStackRef_IsNull(frame->f_executable) ? STACK_LEVEL() : 0;
             (void)(_old_stack_level);
-            opcode = EXTENDED_ARG;
             assert(oparg);
             opcode = next_instr->op.code;
             oparg = oparg << 8 | next_instr->op.arg;
@@ -7294,7 +7287,6 @@
             (void)_jump_taken;
             int _old_stack_level = !PyStackRef_IsNull(frame->f_executable) ? STACK_LEVEL() : 0;
             (void)(_old_stack_level);
-            opcode = INSTRUMENTED_CALL;
             _PyStackRef callable;
             _PyStackRef self_or_null;
             _PyStackRef func;
@@ -7492,7 +7484,6 @@
             (void)_jump_taken;
             int _old_stack_level = !PyStackRef_IsNull(frame->f_executable) ? STACK_LEVEL() : 0;
             (void)(_old_stack_level);
-            opcode = INSTRUMENTED_CALL_FUNCTION_EX;
             _PyStackRef func;
             _PyStackRef callargs;
             _PyStackRef func_st;
@@ -7669,7 +7660,6 @@
             (void)_jump_taken;
             int _old_stack_level = !PyStackRef_IsNull(frame->f_executable) ? STACK_LEVEL() : 0;
             (void)(_old_stack_level);
-            opcode = INSTRUMENTED_CALL_KW;
             _PyStackRef callable;
             _PyStackRef self_or_null;
             _PyStackRef *args;
@@ -8057,7 +8047,6 @@
             (void)_jump_taken;
             int _old_stack_level = !PyStackRef_IsNull(frame->f_executable) ? STACK_LEVEL() : 0;
             (void)(_old_stack_level);
-            opcode = INSTRUMENTED_INSTRUCTION;
             _PyFrame_SetStackPointer(frame, stack_pointer);
             int next_opcode = _Py_call_instrumentation_instruction(
                 tstate, frame, this_instr);
@@ -8156,7 +8145,6 @@
             (void)_jump_taken;
             int _old_stack_level = !PyStackRef_IsNull(frame->f_executable) ? STACK_LEVEL() : 0;
             (void)(_old_stack_level);
-            opcode = INSTRUMENTED_LINE;
             int original_opcode = 0;
             if (tstate->tracing) {
                 PyCodeObject *code = _PyFrame_GetCode(frame);
@@ -8205,7 +8193,6 @@
             (void)_jump_taken;
             int _old_stack_level = !PyStackRef_IsNull(frame->f_executable) ? STACK_LEVEL() : 0;
             (void)(_old_stack_level);
-            opcode = INSTRUMENTED_LOAD_SUPER_ATTR;
             _PyStackRef global_super_st;
             _PyStackRef class_st;
             _PyStackRef self_st;
@@ -11163,7 +11150,6 @@
             (void)_jump_taken;
             int _old_stack_level = !PyStackRef_IsNull(frame->f_executable) ? STACK_LEVEL() : 0;
             (void)(_old_stack_level);
-            opcode = LOAD_SUPER_ATTR;
             _PyStackRef global_super_st;
             _PyStackRef class_st;
             _PyStackRef self_st;
diff --git a/Tools/cases_generator/generators_common.py b/Tools/cases_generator/generators_common.py
index 2d5f4607ab46..3edefb110834 100644
--- a/Tools/cases_generator/generators_common.py
+++ b/Tools/cases_generator/generators_common.py
@@ -107,9 +107,9 @@ class Emitter:
     labels: dict[str, Label]
     _replacers: dict[str, ReplacementFunctionType]
     cannot_escape: bool
-    tracing: str
+    jump_prefix: str
 
-    def __init__(self, out: CWriter, labels: dict[str, Label], cannot_escape: bool = False, is_tracing: bool = False):
+    def __init__(self, out: CWriter, labels: dict[str, Label], cannot_escape: bool = False, jump_prefix: str = ""):
         self._replacers = {
             "EXIT_IF": self.exit_if,
             "AT_END_EXIT_IF": self.exit_if_after,
@@ -133,7 +133,7 @@ class Emitter:
         self.out = out
         self.labels = labels
         self.cannot_escape = cannot_escape
-        self.tracing = "TRACING_" if is_tracing else ""
+        self.jump_prefix = jump_prefix
 
     def dispatch(
         self,
@@ -170,7 +170,7 @@ class Emitter:
         family_name = inst.family.name
         self.emit(f"UPDATE_MISS_STATS({family_name});\n")
         self.emit(f"assert(_PyOpcode_Deopt[opcode] == ({family_name}));\n")
-        self.emit(f"JUMP_TO_PREDICTED({family_name});\n")
+        self.emit(f"JUMP_TO_PREDICTED({self.jump_prefix}{family_name});\n")
         self.emit("}\n")
         return not always_true(first_tkn)
 
@@ -201,10 +201,10 @@ class Emitter:
 
     def goto_error(self, offset: int, storage: Storage) -> str:
         if offset > 0:
-            return f"{self.tracing}JUMP_TO_LABEL(pop_{offset}_error);"
+            return f"{self.jump_prefix}JUMP_TO_LABEL(pop_{offset}_error);"
         if offset < 0:
             storage.copy().flush(self.out)
-        return f"{self.tracing}JUMP_TO_LABEL(error);"
+        return f"{self.jump_prefix}JUMP_TO_LABEL(error);"
 
     def error_if(
         self,
@@ -424,7 +424,7 @@ class Emitter:
         elif storage.spilled:
             raise analysis_error("Cannot jump from spilled label without reloading the stack pointer", goto)
         self.out.start_line()
-        self.out.emit(f"{self.tracing}JUMP_TO_LABEL(")
+        self.out.emit(f"{self.jump_prefix}JUMP_TO_LABEL(")
         self.out.emit(label)
         self.out.emit(")")
 
diff --git a/Tools/cases_generator/tier1_generator.py b/Tools/cases_generator/tier1_generator.py
index c77074f656c9..25a7442a0ef6 100644
--- a/Tools/cases_generator/tier1_generator.py
+++ b/Tools/cases_generator/tier1_generator.py
@@ -217,15 +217,12 @@ def get_popped(inst: Instruction, analysis: Analysis) -> str:
     return (-stack.base_offset).to_c()
 
 def generate_tier1_cases(
-    analysis: Analysis, out: CWriter, emitter: Emitter, is_tracing: bool = False
+    analysis: Analysis, out: CWriter, emitter: Emitter
 ) -> None:
-    tracing_prepend = "TRACING_" if is_tracing else ""
"TRACING_" if is_tracing else "" out.emit("\n") for name, inst in sorted(analysis.instructions.items()): out.emit("\n") - out.emit(f"{tracing_prepend}TARGET({name}) {{\n") - if is_tracing: - out.emit(f"assert(IS_JIT_TRACING());\n") + out.emit(f"TARGET({name}) {{\n") popped = get_popped(inst, analysis) # We need to ifdef it because this breaks platforms # without computed gotos/tail calling. @@ -233,7 +230,7 @@ def generate_tier1_cases( out.emit(f"int opcode = {name};\n") out.emit(f"(void)(opcode);\n") out.emit(f"#endif\n") - needs_this = is_tracing or uses_this(inst) + needs_this = uses_this(inst) unused_guard = "(void)this_instr;\n" if inst.properties.needs_prev: out.emit(f"_Py_CODEUNIT* const prev_instr = frame->instr_ptr;\n") @@ -247,21 +244,10 @@ def generate_tier1_cases( out.emit(f"next_instr += {inst.size};\n") out.emit(f"INSTRUCTION_STATS({name});\n") if inst.is_target: - out.emit(f"PREDICTED_{tracing_prepend}{name}:;\n") + out.emit(f"PREDICTED_{name}:;\n") if needs_this: out.emit(f"_Py_CODEUNIT* const this_instr = next_instr - {inst.size};\n") out.emit(unused_guard) - if is_tracing: - # This is required so that the predicted ops reflect the correct opcode. - out.emit(f"opcode = {name};\n") - out.emit(f"PyCodeObject *old_code = (PyCodeObject *)PyStackRef_AsPyObjectBorrow(frame->f_executable);\n") - out.emit(f"(void)old_code;\n") - out.emit(f"PyFunctionObject *old_func = (PyFunctionObject *)PyStackRef_AsPyObjectBorrow(frame->f_funcobj);\n") - out.emit(f"(void)old_func;\n") - out.emit(f"int _jump_taken = false;\n") - out.emit(f"(void)_jump_taken;\n") - out.emit(f"int _old_stack_level = !PyStackRef_IsNull(frame->f_executable) ? STACK_LEVEL() : 0;\n") - out.emit(f"(void)(_old_stack_level);\n") if inst.properties.uses_opcode: out.emit(f"opcode = {name};\n") if inst.family is not None: @@ -279,7 +265,7 @@ def generate_tier1_cases( out.start_line() if reachable: # type: ignore[possibly-undefined] stack.flush(out) - out.emit(f"{tracing_prepend}DISPATCH();\n") + out.emit(f"DISPATCH();\n") out.start_line() out.emit("}") out.emit("\n") diff --git a/Tools/cases_generator/tracer_generator.py b/Tools/cases_generator/tracer_generator.py index 7227cca90f51..1b7d9a5acea6 100644 --- a/Tools/cases_generator/tracer_generator.py +++ b/Tools/cases_generator/tracer_generator.py @@ -28,7 +28,7 @@ from cwriter import CWriter from typing import TextIO from lexer import Token from stack import Local, Stack, StackError, get_stack_effect, Storage -from tier1_generator import generate_tier1_cases +from tier1_generator import get_popped, declare_variables, write_uop DEFAULT_OUTPUT = ROOT / "Python/generated_tracer_cases.c.h" @@ -39,7 +39,7 @@ class TracerEmitter(Emitter): cannot_escape: bool def __init__(self, out: CWriter, labels: dict[str, Label], cannot_escape: bool = False): - super().__init__(out, labels, cannot_escape, is_tracing=True) + super().__init__(out, labels, cannot_escape, jump_prefix="TRACING_") self._replacers = { **self._replacers, "DISPATCH": self.dispatch, @@ -109,38 +109,71 @@ class TracerEmitter(Emitter): self.out.emit(";\n") return True - def deopt_if( - self, - tkn: Token, - tkn_iter: TokenIterator, - uop: CodeSection, - storage: Storage, - inst: Instruction | None, - ) -> bool: - self.out.start_line() - self.out.emit("if (") - lparen = next(tkn_iter) - assert lparen.kind == "LPAREN" - first_tkn = tkn_iter.peek() - emit_to(self.out, tkn_iter, "RPAREN") - self.emit(") {\n") - next(tkn_iter) # Semi colon - assert inst is not None - assert inst.family is not None - family_name = 
-        self.emit(f"UPDATE_MISS_STATS({family_name});\n")
-        self.emit(f"assert(_PyOpcode_Deopt[opcode] == ({family_name}));\n")
-        self.emit(f"JUMP_TO_PREDICTED(TRACING_{family_name});\n")
-        self.emit("}\n")
-        return not always_true(first_tkn)
-
-    exit_if = deopt_if
+def generate_tier1_tracer_cases(
+    analysis: Analysis, out: CWriter, emitter: Emitter
+) -> None:
+    out.emit("\n")
+    for name, inst in sorted(analysis.instructions.items()):
+        out.emit("\n")
+        out.emit(f"TRACING_TARGET({name}) {{\n")
+        out.emit(f"assert(IS_JIT_TRACING());\n")
+        # We need to ifdef it because this breaks platforms
+        # without computed gotos/tail calling.
+        out.emit(f"#if _Py_TAIL_CALL_INTERP\n")
+        out.emit(f"int opcode = {name};\n")
+        out.emit(f"(void)(opcode);\n")
+        out.emit(f"#endif\n")
+        unused_guard = "(void)this_instr;\n"
+        if inst.properties.needs_prev:
+            out.emit(f"_Py_CODEUNIT* const prev_instr = frame->instr_ptr;\n")
+        if not inst.is_target:
+            out.emit(f"_Py_CODEUNIT* const this_instr = next_instr;\n")
+            out.emit(unused_guard)
+        if not inst.properties.no_save_ip:
+            out.emit(f"frame->instr_ptr = next_instr;\n")
+
+        out.emit(f"next_instr += {inst.size};\n")
+        out.emit(f"INSTRUCTION_STATS({name});\n")
+        if inst.is_target:
+            out.emit(f"PREDICTED_TRACING_{name}:;\n")
+            out.emit(f"_Py_CODEUNIT* const this_instr = next_instr - {inst.size};\n")
+            out.emit(unused_guard)
+        # This is required so that the predicted ops reflect the correct opcode.
+        out.emit(f"opcode = {name};\n")
+        out.emit(f"PyCodeObject *old_code = (PyCodeObject *)PyStackRef_AsPyObjectBorrow(frame->f_executable);\n")
+        out.emit(f"(void)old_code;\n")
+        out.emit(f"PyFunctionObject *old_func = (PyFunctionObject *)PyStackRef_AsPyObjectBorrow(frame->f_funcobj);\n")
+        out.emit(f"(void)old_func;\n")
+        out.emit(f"int _jump_taken = false;\n")
+        out.emit(f"(void)_jump_taken;\n")
+        out.emit(f"int _old_stack_level = !PyStackRef_IsNull(frame->f_executable) ? STACK_LEVEL() : 0;\n")
+        out.emit(f"(void)(_old_stack_level);\n")
+        if inst.family is not None:
+            out.emit(
+                f"static_assert({inst.family.size} == {inst.size-1}"
+                ', "incorrect cache size");\n'
+            )
+        declare_variables(inst, out)
+        offset = 1 # The instruction itself
+        stack = Stack()
+        for part in inst.parts:
+            # Only emit braces if more than one uop
+            insert_braces = len([p for p in inst.parts if isinstance(p, Uop)]) > 1
+            reachable, offset, stack = write_uop(part, emitter, offset, stack, inst, insert_braces)
+        out.start_line()
+        if reachable: # type: ignore[possibly-undefined]
+            stack.flush(out)
+            out.emit(f"TRACING_DISPATCH();\n")
+        out.start_line()
+        out.emit("}")
+        out.emit("\n")
+
 
 def generate_tracer_cases(
     analysis: Analysis, out: CWriter
 ) -> None:
     out.emit(f"#ifdef _Py_TIER2 /* BEGIN TRACING INSTRUCTIONS */\n")
-    generate_tier1_cases(analysis, out, TracerEmitter(out, analysis.labels), is_tracing=True)
+    generate_tier1_tracer_cases(analysis, out, TracerEmitter(out, analysis.labels))
     out.emit(f"#endif /* END TRACING INSTRUCTIONS */\n")
 
 def generate_tracer(