These super-instructions require many special cases in the interpreter, specializer, and JIT. It is best to convert them to normal instructions.
case BINARY_OP_EXTEND:
return 1;
case BINARY_OP_INPLACE_ADD_UNICODE:
- return 0;
+ return 1;
case BINARY_OP_MULTIPLY_FLOAT:
return 1;
case BINARY_OP_MULTIPLY_INT:
case CALL_LEN:
return 1;
case CALL_LIST_APPEND:
- return 0;
+ return 1;
case CALL_METHOD_DESCRIPTOR_FAST:
return 1;
case CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS:
typedef struct _PyJitTracerPreviousState {
bool dependencies_still_valid;
- bool instr_is_super;
int code_max_size;
int code_curr_size;
int instr_oparg;
#define _BINARY_OP_ADD_UNICODE_r13 564
#define _BINARY_OP_ADD_UNICODE_r23 565
#define _BINARY_OP_EXTEND_r21 566
-#define _BINARY_OP_INPLACE_ADD_UNICODE_r20 567
+#define _BINARY_OP_INPLACE_ADD_UNICODE_r21 567
#define _BINARY_OP_MULTIPLY_FLOAT_r03 568
#define _BINARY_OP_MULTIPLY_FLOAT_r13 569
#define _BINARY_OP_MULTIPLY_FLOAT_r23 570
#define _CALL_ISINSTANCE_r31 605
#define _CALL_KW_NON_PY_r11 606
#define _CALL_LEN_r33 607
-#define _CALL_LIST_APPEND_r02 608
-#define _CALL_LIST_APPEND_r12 609
-#define _CALL_LIST_APPEND_r22 610
-#define _CALL_LIST_APPEND_r32 611
+#define _CALL_LIST_APPEND_r03 608
+#define _CALL_LIST_APPEND_r13 609
+#define _CALL_LIST_APPEND_r23 610
+#define _CALL_LIST_APPEND_r33 611
#define _CALL_METHOD_DESCRIPTOR_FAST_r01 612
#define _CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS_r01 613
#define _CALL_METHOD_DESCRIPTOR_NOARGS_r01 614
.entries = {
{ -1, -1, -1 },
{ -1, -1, -1 },
- { 0, 2, _BINARY_OP_INPLACE_ADD_UNICODE_r20 },
+ { 1, 2, _BINARY_OP_INPLACE_ADD_UNICODE_r21 },
{ -1, -1, -1 },
},
},
[_CALL_LIST_APPEND] = {
.best = { 0, 1, 2, 3 },
.entries = {
- { 2, 0, _CALL_LIST_APPEND_r02 },
- { 2, 1, _CALL_LIST_APPEND_r12 },
- { 2, 2, _CALL_LIST_APPEND_r22 },
- { 2, 3, _CALL_LIST_APPEND_r32 },
+ { 3, 0, _CALL_LIST_APPEND_r03 },
+ { 3, 1, _CALL_LIST_APPEND_r13 },
+ { 3, 2, _CALL_LIST_APPEND_r23 },
+ { 3, 3, _CALL_LIST_APPEND_r33 },
},
},
[_CALL_METHOD_DESCRIPTOR_O] = {
[_BINARY_OP_ADD_UNICODE_r03] = _BINARY_OP_ADD_UNICODE,
[_BINARY_OP_ADD_UNICODE_r13] = _BINARY_OP_ADD_UNICODE,
[_BINARY_OP_ADD_UNICODE_r23] = _BINARY_OP_ADD_UNICODE,
- [_BINARY_OP_INPLACE_ADD_UNICODE_r20] = _BINARY_OP_INPLACE_ADD_UNICODE,
+ [_BINARY_OP_INPLACE_ADD_UNICODE_r21] = _BINARY_OP_INPLACE_ADD_UNICODE,
[_GUARD_BINARY_OP_EXTEND_r22] = _GUARD_BINARY_OP_EXTEND,
[_BINARY_OP_EXTEND_r21] = _BINARY_OP_EXTEND,
[_BINARY_SLICE_r31] = _BINARY_SLICE,
[_GUARD_CALLABLE_LIST_APPEND_r13] = _GUARD_CALLABLE_LIST_APPEND,
[_GUARD_CALLABLE_LIST_APPEND_r23] = _GUARD_CALLABLE_LIST_APPEND,
[_GUARD_CALLABLE_LIST_APPEND_r33] = _GUARD_CALLABLE_LIST_APPEND,
- [_CALL_LIST_APPEND_r02] = _CALL_LIST_APPEND,
- [_CALL_LIST_APPEND_r12] = _CALL_LIST_APPEND,
- [_CALL_LIST_APPEND_r22] = _CALL_LIST_APPEND,
- [_CALL_LIST_APPEND_r32] = _CALL_LIST_APPEND,
+ [_CALL_LIST_APPEND_r03] = _CALL_LIST_APPEND,
+ [_CALL_LIST_APPEND_r13] = _CALL_LIST_APPEND,
+ [_CALL_LIST_APPEND_r23] = _CALL_LIST_APPEND,
+ [_CALL_LIST_APPEND_r33] = _CALL_LIST_APPEND,
[_CALL_METHOD_DESCRIPTOR_O_r01] = _CALL_METHOD_DESCRIPTOR_O,
[_CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS_r01] = _CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS,
[_CALL_METHOD_DESCRIPTOR_NOARGS_r01] = _CALL_METHOD_DESCRIPTOR_NOARGS,
[_BINARY_OP_EXTEND] = "_BINARY_OP_EXTEND",
[_BINARY_OP_EXTEND_r21] = "_BINARY_OP_EXTEND_r21",
[_BINARY_OP_INPLACE_ADD_UNICODE] = "_BINARY_OP_INPLACE_ADD_UNICODE",
- [_BINARY_OP_INPLACE_ADD_UNICODE_r20] = "_BINARY_OP_INPLACE_ADD_UNICODE_r20",
+ [_BINARY_OP_INPLACE_ADD_UNICODE_r21] = "_BINARY_OP_INPLACE_ADD_UNICODE_r21",
[_BINARY_OP_MULTIPLY_FLOAT] = "_BINARY_OP_MULTIPLY_FLOAT",
[_BINARY_OP_MULTIPLY_FLOAT_r03] = "_BINARY_OP_MULTIPLY_FLOAT_r03",
[_BINARY_OP_MULTIPLY_FLOAT_r13] = "_BINARY_OP_MULTIPLY_FLOAT_r13",
[_CALL_LEN] = "_CALL_LEN",
[_CALL_LEN_r33] = "_CALL_LEN_r33",
[_CALL_LIST_APPEND] = "_CALL_LIST_APPEND",
- [_CALL_LIST_APPEND_r02] = "_CALL_LIST_APPEND_r02",
- [_CALL_LIST_APPEND_r12] = "_CALL_LIST_APPEND_r12",
- [_CALL_LIST_APPEND_r22] = "_CALL_LIST_APPEND_r22",
- [_CALL_LIST_APPEND_r32] = "_CALL_LIST_APPEND_r32",
+ [_CALL_LIST_APPEND_r03] = "_CALL_LIST_APPEND_r03",
+ [_CALL_LIST_APPEND_r13] = "_CALL_LIST_APPEND_r13",
+ [_CALL_LIST_APPEND_r23] = "_CALL_LIST_APPEND_r23",
+ [_CALL_LIST_APPEND_r33] = "_CALL_LIST_APPEND_r33",
[_CALL_METHOD_DESCRIPTOR_FAST] = "_CALL_METHOD_DESCRIPTOR_FAST",
[_CALL_METHOD_DESCRIPTOR_FAST_r01] = "_CALL_METHOD_DESCRIPTOR_FAST_r01",
[_CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS] = "_CALL_METHOD_DESCRIPTOR_FAST_WITH_KEYWORDS",
"BINARY_OP_SUBSCR_STR_INT",
"BINARY_OP_SUBSCR_DICT",
"BINARY_OP_SUBSCR_GETITEM",
+ "BINARY_OP_INPLACE_ADD_UNICODE",
"BINARY_OP_EXTEND",
- "BINARY_OP_INPLACE_ADD_UNICODE",
],
'BINARY_OP_ADD_UNICODE': 131,
'BINARY_OP_EXTEND': 132,
'BINARY_OP_INPLACE_ADD_UNICODE': 3,
'BINARY_OP_MULTIPLY_FLOAT': 133,
'BINARY_OP_MULTIPLY_INT': 134,
'BINARY_OP_SUBSCR_DICT': 135,
"""), PYTHON_JIT="1")
self.assertEqual(result[0].rc, 0, result)
+ def test_143092(self):
+ def f1():
+ a = "a"
+ for i in range(50):
+ x = a[i % len(a)]
+
+ s = ""
+ for _ in range(10):
+ s += ""
+
+ class A: ...
+ class B: ...
+
+ match s:
+ case int(): ...
+ case str(): ...
+ case dict(): ...
+
+ (
+ u0,
+ *u1,
+ u2,
+ u4,
+ u5,
+ u6,
+ u7,
+ u8,
+ u9, u10, u11,
+ u12, u13, u14, u15, u16, u17, u18, u19, u20, u21, u22, u23, u24, u25, u26, u27, u28, u29,
+ ) = [None, None, None, None, None, None, None, None, None, None, None, None, None, None, None,
+ None, None, None, None, None, None, None, None, None, None, None, None, None, None, None,
+ None, None, None, None, None, None, None, None, None, None, None, None, None, None, None,
+ None, None, None, None, None, None, None, None,]
+
+ s = ""
+ for _ in range(10):
+ s += ""
+ s += ""
+
+ for i in range(TIER2_THRESHOLD * 10):
+ f1()
def global_identity(x):
return x
--- /dev/null
+Fix a crash in the JIT when dealing with ``list.append(x)`` and in-place
+string concatenation (``s += t``) style code, by converting the remaining
+super-instructions to normal instructions.
BINARY_OP_SUBSCR_STR_INT,
BINARY_OP_SUBSCR_DICT,
BINARY_OP_SUBSCR_GETITEM,
- // BINARY_OP_INPLACE_ADD_UNICODE, // See comments at that opcode.
+ BINARY_OP_INPLACE_ADD_UNICODE,
BINARY_OP_EXTEND,
};
macro(BINARY_OP_ADD_UNICODE) =
_GUARD_TOS_UNICODE + _GUARD_NOS_UNICODE + unused/5 + _BINARY_OP_ADD_UNICODE + _POP_TOP_UNICODE + _POP_TOP_UNICODE;
- // This is a subtle one. It's a super-instruction for
- // BINARY_OP_ADD_UNICODE followed by STORE_FAST
- // where the store goes into the left argument.
- // So the inputs are the same as for all BINARY_OP
- // specializations, but there is no output.
- // At the end we just skip over the STORE_FAST.
- op(_BINARY_OP_INPLACE_ADD_UNICODE, (left, right --)) {
+ // This is a subtle one. We write NULL to the local
+ // of the following STORE_FAST and leave the result for STORE_FAST
+ // later to store.
+ op(_BINARY_OP_INPLACE_ADD_UNICODE, (left, right -- res)) {
PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
assert(PyUnicode_CheckExact(left_o));
assert(PyUnicode_CheckExact(PyStackRef_AsPyObjectBorrow(right)));
* that the string is safe to mutate.
*/
assert(Py_REFCNT(left_o) >= 2 || !PyStackRef_IsHeapSafe(left));
- PyStackRef_CLOSE_SPECIALIZED(left, _PyUnicode_ExactDealloc);
- DEAD(left);
PyObject *temp = PyStackRef_AsPyObjectSteal(*target_local);
- PyObject *right_o = PyStackRef_AsPyObjectSteal(right);
+ PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
PyUnicode_Append(&temp, right_o);
- *target_local = PyStackRef_FromPyObjectSteal(temp);
- Py_DECREF(right_o);
- ERROR_IF(PyStackRef_IsNull(*target_local));
- #if TIER_ONE
- // The STORE_FAST is already done. This is done here in tier one,
- // and during trace projection in tier two:
- assert(next_instr->op.code == STORE_FAST);
- SKIP_OVER(1);
- #endif
+ PyStackRef_CLOSE_SPECIALIZED(right, _PyUnicode_ExactDealloc);
+ DEAD(right);
+ PyStackRef_CLOSE_SPECIALIZED(left, _PyUnicode_ExactDealloc);
+ DEAD(left);
+ ERROR_IF(temp == NULL);
+ res = PyStackRef_FromPyObjectSteal(temp);
+ *target_local = PyStackRef_NULL;
}
op(_GUARD_BINARY_OP_EXTEND, (descr/4, left, right -- left, right)) {
DEOPT_IF(callable_o != interp->callable_cache.list_append);
}
- // This is secretly a super-instruction
- op(_CALL_LIST_APPEND, (callable, self, arg -- c, s)) {
+ op(_CALL_LIST_APPEND, (callable, self, arg -- none, c, s)) {
assert(oparg == 1);
PyObject *self_o = PyStackRef_AsPyObjectBorrow(self);
}
c = callable;
s = self;
- INPUTS_DEAD();
- #if TIER_ONE
- // Skip the following POP_TOP. This is done here in tier one, and
- // during trace projection in tier two:
- assert(next_instr->op.code == POP_TOP);
- SKIP_OVER(1);
- #endif
+ DEAD(callable);
+ DEAD(self);
+ none = PyStackRef_None;
}
op(_CALL_METHOD_DESCRIPTOR_O, (callable, self_or_null, args[oparg] -- res)) {
- // Super instructions. Instruction deopted. There's a mismatch in what the stack expects
- // in the optimizer. So we have to reflect in the trace correctly.
_PyThreadStateImpl *_tstate = (_PyThreadStateImpl *)tstate;
- if ((_tstate->jit_tracer_state.prev_state.instr->op.code == CALL_LIST_APPEND &&
- opcode == POP_TOP) ||
- (_tstate->jit_tracer_state.prev_state.instr->op.code == BINARY_OP_INPLACE_ADD_UNICODE &&
- opcode == STORE_FAST)) {
- _tstate->jit_tracer_state.prev_state.instr_is_super = true;
- }
- else {
- _tstate->jit_tracer_state.prev_state.instr = next_instr;
- }
+ // JIT should have disabled super instructions, as we can
+ // do these optimizations ourselves in the JIT.
+ _tstate->jit_tracer_state.prev_state.instr = next_instr;
PyObject *prev_code = PyStackRef_AsPyObjectBorrow(frame->f_executable);
if (_tstate->jit_tracer_state.prev_state.instr_code != (PyCodeObject *)prev_code) {
Py_SETREF(_tstate->jit_tracer_state.prev_state.instr_code, (PyCodeObject*)Py_NewRef((prev_code)));
break;
}
- case _BINARY_OP_INPLACE_ADD_UNICODE_r20: {
+ case _BINARY_OP_INPLACE_ADD_UNICODE_r21: {
CHECK_CURRENT_CACHED_VALUES(2);
assert(WITHIN_STACK_BOUNDS_IGNORING_CACHE());
_PyStackRef right;
_PyStackRef left;
+ _PyStackRef res;
_PyStackRef _stack_item_0 = _tos_cache0;
_PyStackRef _stack_item_1 = _tos_cache1;
right = _stack_item_1;
}
STAT_INC(BINARY_OP, hit);
assert(Py_REFCNT(left_o) >= 2 || !PyStackRef_IsHeapSafe(left));
- PyStackRef_CLOSE_SPECIALIZED(left, _PyUnicode_ExactDealloc);
PyObject *temp = PyStackRef_AsPyObjectSteal(*target_local);
- PyObject *right_o = PyStackRef_AsPyObjectSteal(right);
+ PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
+ stack_pointer[0] = left;
+ stack_pointer[1] = right;
+ stack_pointer += 2;
+ ASSERT_WITHIN_STACK_BOUNDS(__FILE__, __LINE__);
_PyFrame_SetStackPointer(frame, stack_pointer);
PyUnicode_Append(&temp, right_o);
stack_pointer = _PyFrame_GetStackPointer(frame);
- *target_local = PyStackRef_FromPyObjectSteal(temp);
- _PyFrame_SetStackPointer(frame, stack_pointer);
- Py_DECREF(right_o);
- stack_pointer = _PyFrame_GetStackPointer(frame);
- if (PyStackRef_IsNull(*target_local)) {
+ PyStackRef_CLOSE_SPECIALIZED(right, _PyUnicode_ExactDealloc);
+ PyStackRef_CLOSE_SPECIALIZED(left, _PyUnicode_ExactDealloc);
+ if (temp == NULL) {
+ stack_pointer += -2;
+ ASSERT_WITHIN_STACK_BOUNDS(__FILE__, __LINE__);
SET_CURRENT_CACHED_VALUES(0);
JUMP_TO_ERROR();
}
- #if TIER_ONE
-
- assert(next_instr->op.code == STORE_FAST);
- SKIP_OVER(1);
- #endif
- _tos_cache0 = PyStackRef_ZERO_BITS;
+ res = PyStackRef_FromPyObjectSteal(temp);
+ *target_local = PyStackRef_NULL;
+ _tos_cache0 = res;
_tos_cache1 = PyStackRef_ZERO_BITS;
_tos_cache2 = PyStackRef_ZERO_BITS;
- SET_CURRENT_CACHED_VALUES(0);
+ SET_CURRENT_CACHED_VALUES(1);
+ stack_pointer += -2;
+ ASSERT_WITHIN_STACK_BOUNDS(__FILE__, __LINE__);
assert(WITHIN_STACK_BOUNDS_IGNORING_CACHE());
break;
}
break;
}
- case _CALL_LIST_APPEND_r02: {
+ case _CALL_LIST_APPEND_r03: {
CHECK_CURRENT_CACHED_VALUES(0);
assert(WITHIN_STACK_BOUNDS_IGNORING_CACHE());
_PyStackRef arg;
_PyStackRef self;
_PyStackRef callable;
+ _PyStackRef none;
_PyStackRef c;
_PyStackRef s;
oparg = CURRENT_OPARG();
}
c = callable;
s = self;
- #if TIER_ONE
-
- assert(next_instr->op.code == POP_TOP);
- SKIP_OVER(1);
- #endif
- _tos_cache1 = s;
- _tos_cache0 = c;
- SET_CURRENT_CACHED_VALUES(2);
+ none = PyStackRef_None;
+ _tos_cache2 = s;
+ _tos_cache1 = c;
+ _tos_cache0 = none;
+ SET_CURRENT_CACHED_VALUES(3);
stack_pointer += -3;
ASSERT_WITHIN_STACK_BOUNDS(__FILE__, __LINE__);
assert(WITHIN_STACK_BOUNDS_IGNORING_CACHE());
break;
}
- case _CALL_LIST_APPEND_r12: {
+ case _CALL_LIST_APPEND_r13: {
CHECK_CURRENT_CACHED_VALUES(1);
assert(WITHIN_STACK_BOUNDS_IGNORING_CACHE());
_PyStackRef arg;
_PyStackRef self;
_PyStackRef callable;
+ _PyStackRef none;
_PyStackRef c;
_PyStackRef s;
_PyStackRef _stack_item_0 = _tos_cache0;
}
c = callable;
s = self;
- #if TIER_ONE
-
- assert(next_instr->op.code == POP_TOP);
- SKIP_OVER(1);
- #endif
- _tos_cache1 = s;
- _tos_cache0 = c;
- SET_CURRENT_CACHED_VALUES(2);
+ none = PyStackRef_None;
+ _tos_cache2 = s;
+ _tos_cache1 = c;
+ _tos_cache0 = none;
+ SET_CURRENT_CACHED_VALUES(3);
stack_pointer += -2;
ASSERT_WITHIN_STACK_BOUNDS(__FILE__, __LINE__);
assert(WITHIN_STACK_BOUNDS_IGNORING_CACHE());
break;
}
- case _CALL_LIST_APPEND_r22: {
+ case _CALL_LIST_APPEND_r23: {
CHECK_CURRENT_CACHED_VALUES(2);
assert(WITHIN_STACK_BOUNDS_IGNORING_CACHE());
_PyStackRef arg;
_PyStackRef self;
_PyStackRef callable;
+ _PyStackRef none;
_PyStackRef c;
_PyStackRef s;
_PyStackRef _stack_item_0 = _tos_cache0;
}
c = callable;
s = self;
- #if TIER_ONE
-
- assert(next_instr->op.code == POP_TOP);
- SKIP_OVER(1);
- #endif
- _tos_cache1 = s;
- _tos_cache0 = c;
- SET_CURRENT_CACHED_VALUES(2);
+ none = PyStackRef_None;
+ _tos_cache2 = s;
+ _tos_cache1 = c;
+ _tos_cache0 = none;
+ SET_CURRENT_CACHED_VALUES(3);
stack_pointer += -1;
ASSERT_WITHIN_STACK_BOUNDS(__FILE__, __LINE__);
assert(WITHIN_STACK_BOUNDS_IGNORING_CACHE());
break;
}
- case _CALL_LIST_APPEND_r32: {
+ case _CALL_LIST_APPEND_r33: {
CHECK_CURRENT_CACHED_VALUES(3);
assert(WITHIN_STACK_BOUNDS_IGNORING_CACHE());
_PyStackRef arg;
_PyStackRef self;
_PyStackRef callable;
+ _PyStackRef none;
_PyStackRef c;
_PyStackRef s;
_PyStackRef _stack_item_0 = _tos_cache0;
}
c = callable;
s = self;
- #if TIER_ONE
-
- assert(next_instr->op.code == POP_TOP);
- SKIP_OVER(1);
- #endif
- _tos_cache1 = s;
- _tos_cache0 = c;
- SET_CURRENT_CACHED_VALUES(2);
+ none = PyStackRef_None;
+ _tos_cache2 = s;
+ _tos_cache1 = c;
+ _tos_cache0 = none;
+ SET_CURRENT_CACHED_VALUES(3);
assert(WITHIN_STACK_BOUNDS_IGNORING_CACHE());
break;
}
_PyStackRef nos;
_PyStackRef left;
_PyStackRef right;
+ _PyStackRef res;
// _GUARD_TOS_UNICODE
{
value = stack_pointer[-1];
}
STAT_INC(BINARY_OP, hit);
assert(Py_REFCNT(left_o) >= 2 || !PyStackRef_IsHeapSafe(left));
- PyStackRef_CLOSE_SPECIALIZED(left, _PyUnicode_ExactDealloc);
PyObject *temp = PyStackRef_AsPyObjectSteal(*target_local);
- PyObject *right_o = PyStackRef_AsPyObjectSteal(right);
- stack_pointer += -2;
- ASSERT_WITHIN_STACK_BOUNDS(__FILE__, __LINE__);
+ PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
_PyFrame_SetStackPointer(frame, stack_pointer);
PyUnicode_Append(&temp, right_o);
stack_pointer = _PyFrame_GetStackPointer(frame);
- *target_local = PyStackRef_FromPyObjectSteal(temp);
- _PyFrame_SetStackPointer(frame, stack_pointer);
- Py_DECREF(right_o);
- stack_pointer = _PyFrame_GetStackPointer(frame);
- if (PyStackRef_IsNull(*target_local)) {
- JUMP_TO_LABEL(error);
+ PyStackRef_CLOSE_SPECIALIZED(right, _PyUnicode_ExactDealloc);
+ PyStackRef_CLOSE_SPECIALIZED(left, _PyUnicode_ExactDealloc);
+ if (temp == NULL) {
+ JUMP_TO_LABEL(pop_2_error);
}
- #if TIER_ONE
-
- assert(next_instr->op.code == STORE_FAST);
- SKIP_OVER(1);
- #endif
+ res = PyStackRef_FromPyObjectSteal(temp);
+ *target_local = PyStackRef_NULL;
}
+ stack_pointer[-2] = res;
+ stack_pointer += -1;
+ ASSERT_WITHIN_STACK_BOUNDS(__FILE__, __LINE__);
DISPATCH();
}
_PyStackRef nos;
_PyStackRef self;
_PyStackRef arg;
+ _PyStackRef none;
_PyStackRef c;
_PyStackRef s;
_PyStackRef value;
}
c = callable;
s = self;
- #if TIER_ONE
-
- assert(next_instr->op.code == POP_TOP);
- SKIP_OVER(1);
- #endif
+ none = PyStackRef_None;
}
// _POP_TOP
{
value = s;
- stack_pointer[-3] = c;
- stack_pointer += -2;
+ stack_pointer[-3] = none;
+ stack_pointer[-2] = c;
+ stack_pointer += -1;
ASSERT_WITHIN_STACK_BOUNDS(__FILE__, __LINE__);
_PyFrame_SetStackPointer(frame, stack_pointer);
PyStackRef_XCLOSE(value);
DISPATCH();
}
_PyThreadStateImpl *_tstate = (_PyThreadStateImpl *)tstate;
- if ((_tstate->jit_tracer_state.prev_state.instr->op.code == CALL_LIST_APPEND &&
- opcode == POP_TOP) ||
- (_tstate->jit_tracer_state.prev_state.instr->op.code == BINARY_OP_INPLACE_ADD_UNICODE &&
- opcode == STORE_FAST)) {
- _tstate->jit_tracer_state.prev_state.instr_is_super = true;
- }
- else {
- _tstate->jit_tracer_state.prev_state.instr = next_instr;
- }
+ _tstate->jit_tracer_state.prev_state.instr = next_instr;
PyObject *prev_code = PyStackRef_AsPyObjectBorrow(frame->f_executable);
if (_tstate->jit_tracer_state.prev_state.instr_code != (PyCodeObject *)prev_code) {
_PyFrame_SetStackPointer(frame, stack_pointer);
}
#endif
- // Skip over super instructions.
- if (_tstate->jit_tracer_state.prev_state.instr_is_super) {
- _tstate->jit_tracer_state.prev_state.instr_is_super = false;
- return 1;
- }
-
if (opcode == ENTER_EXECUTOR) {
goto full;
}
_tstate->jit_tracer_state.prev_state.instr_frame = frame;
_tstate->jit_tracer_state.prev_state.instr_oparg = oparg;
_tstate->jit_tracer_state.prev_state.instr_stacklevel = curr_stackdepth;
- _tstate->jit_tracer_state.prev_state.instr_is_super = false;
assert(curr_instr->op.code == JUMP_BACKWARD_JIT || (exit != NULL));
_tstate->jit_tracer_state.initial_state.jump_backward_instr = curr_instr;
}
op(_BINARY_OP_ADD_UNICODE, (left, right -- res, l, r)) {
- REPLACE_OPCODE_IF_EVALUATES_PURE(left, right);
res = sym_new_type(ctx, &PyUnicode_Type);
l = left;
r = right;
}
- op(_BINARY_OP_INPLACE_ADD_UNICODE, (left, right --)) {
- JitOptRef res;
+ op(_BINARY_OP_INPLACE_ADD_UNICODE, (left, right -- res)) {
if (sym_is_const(ctx, left) && sym_is_const(ctx, right)) {
assert(PyUnicode_CheckExact(sym_get_const(ctx, left)));
assert(PyUnicode_CheckExact(sym_get_const(ctx, right)));
else {
res = sym_new_type(ctx, &PyUnicode_Type);
}
- // _STORE_FAST:
- GETLOCAL(this_instr->operand0) = res;
+ GETLOCAL(this_instr->operand0) = sym_new_null(ctx);
}
op(_BINARY_OP_SUBSCR_INIT_CALL, (container, sub, getitem -- new_frame)) {
sym_set_const(flag, Py_True);
}
- op(_CALL_LIST_APPEND, (callable, self, arg -- c, s)) {
+ op(_CALL_LIST_APPEND, (callable, self, arg -- none, c, s)) {
(void)(arg);
c = callable;
s = self;
+ none = sym_new_const(ctx, Py_None);
}
op(_GUARD_IS_FALSE_POP, (flag -- )) {
JitOptRef r;
right = stack_pointer[-1];
left = stack_pointer[-2];
- if (
- sym_is_safe_const(ctx, left) &&
- sym_is_safe_const(ctx, right)
- ) {
- JitOptRef left_sym = left;
- JitOptRef right_sym = right;
- _PyStackRef left = sym_get_const_as_stackref(ctx, left_sym);
- _PyStackRef right = sym_get_const_as_stackref(ctx, right_sym);
- _PyStackRef res_stackref;
- _PyStackRef l_stackref;
- _PyStackRef r_stackref;
- /* Start of uop copied from bytecodes for constant evaluation */
- PyObject *left_o = PyStackRef_AsPyObjectBorrow(left);
- PyObject *right_o = PyStackRef_AsPyObjectBorrow(right);
- assert(PyUnicode_CheckExact(left_o));
- assert(PyUnicode_CheckExact(right_o));
- STAT_INC(BINARY_OP, hit);
- PyObject *res_o = PyUnicode_Concat(left_o, right_o);
- res_stackref = PyStackRef_FromPyObjectSteal(res_o);
- if (PyStackRef_IsNull(res)) {
- JUMP_TO_LABEL(error);
- }
- l_stackref = left;
- r_stackref = right;
- /* End of uop copied from bytecodes for constant evaluation */
- res = sym_new_const_steal(ctx, PyStackRef_AsPyObjectSteal(res_stackref));
- l = sym_new_const_steal(ctx, PyStackRef_AsPyObjectSteal(l_stackref));
- r = sym_new_const_steal(ctx, PyStackRef_AsPyObjectSteal(r_stackref));
- CHECK_STACK_BOUNDS(1);
- stack_pointer[-2] = res;
- stack_pointer[-1] = l;
- stack_pointer[0] = r;
- stack_pointer += 1;
- ASSERT_WITHIN_STACK_BOUNDS(__FILE__, __LINE__);
- break;
- }
res = sym_new_type(ctx, &PyUnicode_Type);
l = left;
r = right;
case _BINARY_OP_INPLACE_ADD_UNICODE: {
JitOptRef right;
JitOptRef left;
+ JitOptRef res;
right = stack_pointer[-1];
left = stack_pointer[-2];
- JitOptRef res;
if (sym_is_const(ctx, left) && sym_is_const(ctx, right)) {
assert(PyUnicode_CheckExact(sym_get_const(ctx, left)));
assert(PyUnicode_CheckExact(sym_get_const(ctx, right)));
goto error;
}
res = sym_new_const(ctx, temp);
+ CHECK_STACK_BOUNDS(-1);
+ stack_pointer[-2] = res;
+ stack_pointer += -1;
+ ASSERT_WITHIN_STACK_BOUNDS(__FILE__, __LINE__);
Py_DECREF(temp);
}
else {
res = sym_new_type(ctx, &PyUnicode_Type);
+ stack_pointer += -1;
}
- GETLOCAL(this_instr->operand0) = res;
- CHECK_STACK_BOUNDS(-2);
- stack_pointer += -2;
- ASSERT_WITHIN_STACK_BOUNDS(__FILE__, __LINE__);
+ GETLOCAL(this_instr->operand0) = sym_new_null(ctx);
+ stack_pointer[-1] = res;
break;
}
JitOptRef arg;
JitOptRef self;
JitOptRef callable;
+ JitOptRef none;
JitOptRef c;
JitOptRef s;
arg = stack_pointer[-1];
(void)(arg);
c = callable;
s = self;
- CHECK_STACK_BOUNDS(-1);
- stack_pointer[-3] = c;
- stack_pointer[-2] = s;
- stack_pointer += -1;
- ASSERT_WITHIN_STACK_BOUNDS(__FILE__, __LINE__);
+ none = sym_new_const(ctx, Py_None);
+ stack_pointer[-3] = none;
+ stack_pointer[-2] = c;
+ stack_pointer[-1] = s;
break;
}
}
PyInterpreterState *interp = _PyInterpreterState_GET();
PyObject *list_append = interp->callable_cache.list_append;
- _Py_CODEUNIT next = instr[INLINE_CACHE_ENTRIES_CALL + 1];
- bool pop = (next.op.code == POP_TOP);
int oparg = instr->op.arg;
- if ((PyObject *)descr == list_append && oparg == 1 && pop) {
+ if ((PyObject *)descr == list_append && oparg == 1) {
assert(self_or_null != NULL);
if (PyList_CheckExact(self_or_null)) {
specialize(instr, CALL_LIST_APPEND);