# define LEAVE_TRACING() \
DISPATCH_TABLE_VAR = DISPATCH_TABLE;
# define BAIL_TRACING_NO_DISPATCH() \
- LEAVE_TRACING(); \
- _PyFrame_SetStackPointer(frame, stack_pointer); \
- int _err = _PyOptimizer_Optimize(frame, tstate); \
- _PyJIT_FinalizeTracing(tstate); \
- stack_pointer = _PyFrame_GetStackPointer(frame); \
- if (_err < 0) { \
- JUMP_TO_LABEL(error); \
- }
+ do { \
+ LEAVE_TRACING(); \
+ if (!_PyErr_Occurred(tstate)) { \
+ _PyFrame_SetStackPointer(frame, stack_pointer); \
+ int _err = _PyOptimizer_Optimize(frame, tstate); \
+ _PyJIT_FinalizeTracing(tstate); \
+ stack_pointer = _PyFrame_GetStackPointer(frame); \
+ if (_err < 0) { \
+ JUMP_TO_LABEL(error); \
+ } \
+ } \
+ else { \
+ _PyFrame_SetStackPointer(frame, stack_pointer); \
+ _PyJIT_FinalizeTracing(tstate); \
+ stack_pointer = _PyFrame_GetStackPointer(frame); \
+ } \
+ } while (0)
# define RECORD_TRACE_NO_DISPATCH() do { \
if (IS_JIT_TRACING() && add_to_code_trace(tstate, frame, old_code, old_func, this_instr, next_instr, opcode, oparg, _jump_taken)) { \
BAIL_TRACING_NO_DISPATCH(); \
}
if (uop == _PUSH_FRAME || uop == _RETURN_VALUE || uop == _RETURN_GENERATOR || uop == _YIELD_VALUE) {
PyCodeObject *new_code = (PyCodeObject *)PyStackRef_AsPyObjectBorrow(frame->f_executable);
- if (func != NULL) {
- operand = (uintptr_t)func;
+ PyFunctionObject *new_func = (PyFunctionObject *)PyStackRef_AsPyObjectBorrow(frame->f_funcobj);
+ if (new_func != NULL) {
+ operand = (uintptr_t)new_func;
}
else if (new_code != NULL) {
operand = (uintptr_t)new_code | 1;
{
OPT_STAT_INC(optimizer_attempts);
- // int err = optimize_uops(
- // initial_func, buffer,
- // length, curr_stacklen, dependencies);
- //
- // if (err == 0) {
- // return err;
- // }
+ int err = optimize_uops(
+ initial_func, buffer,
+ length, curr_stacklen, dependencies);
+
+ if (err == 0) {
+ return err;
+ }
assert(length > 0);
op(_RETURN_GENERATOR, ( -- res)) {
SYNC_SP();
- PyCodeObject *co = get_current_code_object(ctx);
- ctx->frame->stack_pointer = stack_pointer;
- if (frame_pop(ctx)) {
- break;
- }
stack_pointer = ctx->frame->stack_pointer;
res = sym_new_unknown(ctx);
-
- /* Stack space handling */
- assert(corresponding_check_stack == NULL);
- assert(co != NULL);
- int framesize = co->co_framesize;
- assert(framesize > 0);
- assert(framesize <= curr_space);
- curr_space -= framesize;
+ ctx->done = true;
+ ctx->out_of_space = true;
}
op(_YIELD_VALUE, (unused -- value)) {
+ // TODO (gh-139109): handle this properly in a future optimization.
+ // A possibility to handle underflows is to just restore the current frame information
+ // from whatever is stored in the trace we record at that point of time.
+ // E.g. we record at this YIELD_VALUE, func_obj=x , stack_level=4
+ // We can restore it to there.
value = sym_new_unknown(ctx);
+ ctx->done = true;
+ ctx->out_of_space = true;
}
op(_GET_ITER, (iterable -- iter, index_or_null)) {
case _YIELD_VALUE: {
JitOptRef value;
value = sym_new_unknown(ctx);
+ ctx->done = true;
+ ctx->out_of_space = true;
stack_pointer[-1] = value;
break;
}
case _RETURN_GENERATOR: {
JitOptRef res;
- PyCodeObject *co = get_current_code_object(ctx);
- ctx->frame->stack_pointer = stack_pointer;
- if (frame_pop(ctx)) {
- break;
- }
stack_pointer = ctx->frame->stack_pointer;
res = sym_new_unknown(ctx);
- assert(corresponding_check_stack == NULL);
- assert(co != NULL);
- int framesize = co->co_framesize;
- assert(framesize > 0);
- assert(framesize <= curr_space);
- curr_space -= framesize;
+ ctx->done = true;
+ ctx->out_of_space = true;
stack_pointer[0] = res;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());