Python 3.14a7 3623 (Add BUILD_INTERPOLATION & BUILD_TEMPLATE opcodes)
Python 3.14b1 3624 (Don't optimize LOAD_FAST when local is killed by DELETE_FAST)
Python 3.15a0 3650 (Initial version)
+ Python 3.15a1 3651 (Simplify LOAD_CONST)
Python 3.16 will start with 3700
*/
-#define PYC_MAGIC_NUMBER 3650
+#define PYC_MAGIC_NUMBER 3651
/* This is equivalent to converting PYC_MAGIC_NUMBER to 2 bytes
(little-endian) and then appending b'\r\n'. */
#define PYC_MAGIC_NUMBER_TOKEN \
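A quick sanity check of the comment above: the token is just PYC_MAGIC_NUMBER packed as two little-endian bytes with b'\r\n' appended. A minimal Python sketch (3651 is the new PYC_MAGIC_NUMBER from this change; the importlib comparison only holds on an interpreter built from this revision):

import importlib.util

token = (3651).to_bytes(2, "little") + b"\r\n"
print(token)  # b'C\x0e\r\n'
# On a matching build this equals the runtime constant:
# assert importlib.util.MAGIC_NUMBER == token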
return 0;
case LOAD_CONST:
return 0;
- case LOAD_CONST_IMMORTAL:
- return 0;
- case LOAD_CONST_MORTAL:
- return 0;
case LOAD_DEREF:
return 0;
case LOAD_FAST:
return 1;
case LOAD_CONST:
return 1;
- case LOAD_CONST_IMMORTAL:
- return 1;
- case LOAD_CONST_MORTAL:
- return 1;
case LOAD_DEREF:
return 1;
case LOAD_FAST:
[LOAD_BUILD_CLASS] = { true, INSTR_FMT_IX, HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
[LOAD_COMMON_CONSTANT] = { true, INSTR_FMT_IB, HAS_ARG_FLAG },
[LOAD_CONST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_CONST_FLAG },
- [LOAD_CONST_IMMORTAL] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_CONST_FLAG },
- [LOAD_CONST_MORTAL] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_CONST_FLAG },
[LOAD_DEREF] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_LOCAL_FLAG | HAS_ERROR_FLAG | HAS_ESCAPES_FLAG },
[LOAD_FAST] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_LOCAL_FLAG | HAS_PURE_FLAG },
[LOAD_FAST_AND_CLEAR] = { true, INSTR_FMT_IB, HAS_ARG_FLAG | HAS_LOCAL_FLAG },
[LOAD_ATTR_WITH_HINT] = { .nuops = 3, .uops = { { _GUARD_TYPE_VERSION, 2, 1 }, { _LOAD_ATTR_WITH_HINT, 1, 3 }, { _PUSH_NULL_CONDITIONAL, OPARG_SIMPLE, 9 } } },
[LOAD_BUILD_CLASS] = { .nuops = 1, .uops = { { _LOAD_BUILD_CLASS, OPARG_SIMPLE, 0 } } },
[LOAD_COMMON_CONSTANT] = { .nuops = 1, .uops = { { _LOAD_COMMON_CONSTANT, OPARG_SIMPLE, 0 } } },
- [LOAD_CONST_IMMORTAL] = { .nuops = 1, .uops = { { _LOAD_CONST_IMMORTAL, OPARG_SIMPLE, 0 } } },
- [LOAD_CONST_MORTAL] = { .nuops = 1, .uops = { { _LOAD_CONST_MORTAL, OPARG_SIMPLE, 0 } } },
+ [LOAD_CONST] = { .nuops = 1, .uops = { { _LOAD_CONST, OPARG_SIMPLE, 0 } } },
[LOAD_DEREF] = { .nuops = 1, .uops = { { _LOAD_DEREF, OPARG_SIMPLE, 0 } } },
[LOAD_FAST] = { .nuops = 1, .uops = { { _LOAD_FAST, OPARG_SIMPLE, 0 } } },
[LOAD_FAST_AND_CLEAR] = { .nuops = 1, .uops = { { _LOAD_FAST_AND_CLEAR, OPARG_SIMPLE, 0 } } },
[LOAD_CLOSURE] = "LOAD_CLOSURE",
[LOAD_COMMON_CONSTANT] = "LOAD_COMMON_CONSTANT",
[LOAD_CONST] = "LOAD_CONST",
- [LOAD_CONST_IMMORTAL] = "LOAD_CONST_IMMORTAL",
- [LOAD_CONST_MORTAL] = "LOAD_CONST_MORTAL",
[LOAD_DEREF] = "LOAD_DEREF",
[LOAD_FAST] = "LOAD_FAST",
[LOAD_FAST_AND_CLEAR] = "LOAD_FAST_AND_CLEAR",
[125] = 125,
[126] = 126,
[127] = 127,
+ [210] = 210,
+ [211] = 211,
[212] = 212,
[213] = 213,
[214] = 214,
[LOAD_BUILD_CLASS] = LOAD_BUILD_CLASS,
[LOAD_COMMON_CONSTANT] = LOAD_COMMON_CONSTANT,
[LOAD_CONST] = LOAD_CONST,
- [LOAD_CONST_IMMORTAL] = LOAD_CONST,
- [LOAD_CONST_MORTAL] = LOAD_CONST,
[LOAD_DEREF] = LOAD_DEREF,
[LOAD_FAST] = LOAD_FAST,
[LOAD_FAST_AND_CLEAR] = LOAD_FAST_AND_CLEAR,
case 125: \
case 126: \
case 127: \
+ case 210: \
+ case 211: \
case 212: \
case 213: \
case 214: \
#define PyStackRef_FromPyObjectSteal(obj) _PyStackRef_FromPyObjectSteal(_PyObject_CAST(obj), __FILE__, __LINE__)
static inline _PyStackRef
-_PyStackRef_FromPyObjectImmortal(PyObject *obj, const char *filename, int linenumber)
+_PyStackRef_FromPyObjectBorrow(PyObject *obj, const char *filename, int linenumber)
{
- assert(_Py_IsImmortal(obj));
return _Py_stackref_create(obj, filename, linenumber);
}
-#define PyStackRef_FromPyObjectImmortal(obj) _PyStackRef_FromPyObjectImmortal(_PyObject_CAST(obj), __FILE__, __LINE__)
+#define PyStackRef_FromPyObjectBorrow(obj) _PyStackRef_FromPyObjectBorrow(_PyObject_CAST(obj), __FILE__, __LINE__)
static inline void
_PyStackRef_CLOSE(_PyStackRef ref, const char *filename, int linenumber)
#define PyStackRef_FromPyObjectNew(obj) PyStackRef_FromPyObjectNew(_PyObject_CAST(obj))
static inline _PyStackRef
-PyStackRef_FromPyObjectImmortal(PyObject *obj)
+PyStackRef_FromPyObjectBorrow(PyObject *obj)
{
// Make sure we don't take an already tagged value.
assert(((uintptr_t)obj & Py_TAG_BITS) == 0);
assert(obj != NULL);
- assert(_Py_IsImmortal(obj));
return (_PyStackRef){ .bits = (uintptr_t)obj | Py_TAG_DEFERRED };
}
-#define PyStackRef_FromPyObjectImmortal(obj) PyStackRef_FromPyObjectImmortal(_PyObject_CAST(obj))
+#define PyStackRef_FromPyObjectBorrow(obj) PyStackRef_FromPyObjectBorrow(_PyObject_CAST(obj))
#define PyStackRef_CLOSE(REF) \
do { \
/* Create a new reference from an object with an embedded reference count */
static inline _PyStackRef
-PyStackRef_FromPyObjectImmortal(PyObject *obj)
+PyStackRef_FromPyObjectBorrow(PyObject *obj)
{
- assert(_Py_IsImmortal(obj));
return (_PyStackRef){ .bits = (uintptr_t)obj | Py_TAG_REFCNT};
}
#define _LOAD_BYTECODE 445
#define _LOAD_COMMON_CONSTANT LOAD_COMMON_CONSTANT
#define _LOAD_CONST LOAD_CONST
-#define _LOAD_CONST_IMMORTAL LOAD_CONST_IMMORTAL
#define _LOAD_CONST_INLINE 446
#define _LOAD_CONST_INLINE_BORROW 447
-#define _LOAD_CONST_MORTAL LOAD_CONST_MORTAL
#define _LOAD_DEREF LOAD_DEREF
#define _LOAD_FAST 448
#define _LOAD_FAST_0 449
[_LOAD_FAST_AND_CLEAR] = HAS_ARG_FLAG | HAS_LOCAL_FLAG,
[_LOAD_FAST_LOAD_FAST] = HAS_ARG_FLAG | HAS_LOCAL_FLAG,
[_LOAD_FAST_BORROW_LOAD_FAST_BORROW] = HAS_ARG_FLAG | HAS_LOCAL_FLAG,
- [_LOAD_CONST_MORTAL] = HAS_ARG_FLAG | HAS_CONST_FLAG,
- [_LOAD_CONST_IMMORTAL] = HAS_ARG_FLAG | HAS_CONST_FLAG,
+ [_LOAD_CONST] = HAS_ARG_FLAG | HAS_CONST_FLAG,
[_LOAD_SMALL_INT_0] = 0,
[_LOAD_SMALL_INT_1] = 0,
[_LOAD_SMALL_INT_2] = 0,
[_LOAD_ATTR_WITH_HINT] = "_LOAD_ATTR_WITH_HINT",
[_LOAD_BUILD_CLASS] = "_LOAD_BUILD_CLASS",
[_LOAD_COMMON_CONSTANT] = "_LOAD_COMMON_CONSTANT",
- [_LOAD_CONST_IMMORTAL] = "_LOAD_CONST_IMMORTAL",
+ [_LOAD_CONST] = "_LOAD_CONST",
[_LOAD_CONST_INLINE] = "_LOAD_CONST_INLINE",
[_LOAD_CONST_INLINE_BORROW] = "_LOAD_CONST_INLINE_BORROW",
- [_LOAD_CONST_MORTAL] = "_LOAD_CONST_MORTAL",
[_LOAD_DEREF] = "_LOAD_DEREF",
[_LOAD_FAST] = "_LOAD_FAST",
[_LOAD_FAST_0] = "_LOAD_FAST_0",
return 0;
case _LOAD_FAST_BORROW_LOAD_FAST_BORROW:
return 0;
- case _LOAD_CONST_MORTAL:
- return 0;
- case _LOAD_CONST_IMMORTAL:
+ case _LOAD_CONST:
return 0;
case _LOAD_SMALL_INT_0:
return 0;
#define LOAD_ATTR_PROPERTY 187
#define LOAD_ATTR_SLOT 188
#define LOAD_ATTR_WITH_HINT 189
-#define LOAD_CONST_IMMORTAL 190
-#define LOAD_CONST_MORTAL 191
-#define LOAD_GLOBAL_BUILTIN 192
-#define LOAD_GLOBAL_MODULE 193
-#define LOAD_SUPER_ATTR_ATTR 194
-#define LOAD_SUPER_ATTR_METHOD 195
-#define RESUME_CHECK 196
-#define SEND_GEN 197
-#define STORE_ATTR_INSTANCE_VALUE 198
-#define STORE_ATTR_SLOT 199
-#define STORE_ATTR_WITH_HINT 200
-#define STORE_SUBSCR_DICT 201
-#define STORE_SUBSCR_LIST_INT 202
-#define TO_BOOL_ALWAYS_TRUE 203
-#define TO_BOOL_BOOL 204
-#define TO_BOOL_INT 205
-#define TO_BOOL_LIST 206
-#define TO_BOOL_NONE 207
-#define TO_BOOL_STR 208
-#define UNPACK_SEQUENCE_LIST 209
-#define UNPACK_SEQUENCE_TUPLE 210
-#define UNPACK_SEQUENCE_TWO_TUPLE 211
+#define LOAD_GLOBAL_BUILTIN 190
+#define LOAD_GLOBAL_MODULE 191
+#define LOAD_SUPER_ATTR_ATTR 192
+#define LOAD_SUPER_ATTR_METHOD 193
+#define RESUME_CHECK 194
+#define SEND_GEN 195
+#define STORE_ATTR_INSTANCE_VALUE 196
+#define STORE_ATTR_SLOT 197
+#define STORE_ATTR_WITH_HINT 198
+#define STORE_SUBSCR_DICT 199
+#define STORE_SUBSCR_LIST_INT 200
+#define TO_BOOL_ALWAYS_TRUE 201
+#define TO_BOOL_BOOL 202
+#define TO_BOOL_INT 203
+#define TO_BOOL_LIST 204
+#define TO_BOOL_NONE 205
+#define TO_BOOL_STR 206
+#define UNPACK_SEQUENCE_LIST 207
+#define UNPACK_SEQUENCE_TUPLE 208
+#define UNPACK_SEQUENCE_TWO_TUPLE 209
#define INSTRUMENTED_END_FOR 234
#define INSTRUMENTED_POP_ITER 235
#define INSTRUMENTED_END_SEND 236
"RESUME": [
"RESUME_CHECK",
],
- "LOAD_CONST": [
- "LOAD_CONST_MORTAL",
- "LOAD_CONST_IMMORTAL",
- ],
"TO_BOOL": [
"TO_BOOL_ALWAYS_TRUE",
"TO_BOOL_BOOL",
'LOAD_ATTR_PROPERTY': 187,
'LOAD_ATTR_SLOT': 188,
'LOAD_ATTR_WITH_HINT': 189,
- 'LOAD_CONST_IMMORTAL': 190,
- 'LOAD_CONST_MORTAL': 191,
- 'LOAD_GLOBAL_BUILTIN': 192,
- 'LOAD_GLOBAL_MODULE': 193,
- 'LOAD_SUPER_ATTR_ATTR': 194,
- 'LOAD_SUPER_ATTR_METHOD': 195,
- 'RESUME_CHECK': 196,
- 'SEND_GEN': 197,
- 'STORE_ATTR_INSTANCE_VALUE': 198,
- 'STORE_ATTR_SLOT': 199,
- 'STORE_ATTR_WITH_HINT': 200,
- 'STORE_SUBSCR_DICT': 201,
- 'STORE_SUBSCR_LIST_INT': 202,
- 'TO_BOOL_ALWAYS_TRUE': 203,
- 'TO_BOOL_BOOL': 204,
- 'TO_BOOL_INT': 205,
- 'TO_BOOL_LIST': 206,
- 'TO_BOOL_NONE': 207,
- 'TO_BOOL_STR': 208,
- 'UNPACK_SEQUENCE_LIST': 209,
- 'UNPACK_SEQUENCE_TUPLE': 210,
- 'UNPACK_SEQUENCE_TWO_TUPLE': 211,
+ 'LOAD_GLOBAL_BUILTIN': 190,
+ 'LOAD_GLOBAL_MODULE': 191,
+ 'LOAD_SUPER_ATTR_ATTR': 192,
+ 'LOAD_SUPER_ATTR_METHOD': 193,
+ 'RESUME_CHECK': 194,
+ 'SEND_GEN': 195,
+ 'STORE_ATTR_INSTANCE_VALUE': 196,
+ 'STORE_ATTR_SLOT': 197,
+ 'STORE_ATTR_WITH_HINT': 198,
+ 'STORE_SUBSCR_DICT': 199,
+ 'STORE_SUBSCR_LIST_INT': 200,
+ 'TO_BOOL_ALWAYS_TRUE': 201,
+ 'TO_BOOL_BOOL': 202,
+ 'TO_BOOL_INT': 203,
+ 'TO_BOOL_LIST': 204,
+ 'TO_BOOL_NONE': 205,
+ 'TO_BOOL_STR': 206,
+ 'UNPACK_SEQUENCE_LIST': 207,
+ 'UNPACK_SEQUENCE_TUPLE': 208,
+ 'UNPACK_SEQUENCE_TWO_TUPLE': 209,
}
opmap = {
self.assertTrue(_testcapi.is_uniquely_referenced(object()))
self.assertTrue(_testcapi.is_uniquely_referenced([]))
# Immortals
- self.assertFalse(_testcapi.is_uniquely_referenced("spanish inquisition"))
+ self.assertFalse(_testcapi.is_uniquely_referenced(()))
self.assertFalse(_testcapi.is_uniquely_referenced(42))
# CRASHES is_uniquely_referenced(NULL)
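For context on the test tweak above: the empty tuple and small ints are preallocated, immortal singletons, so they can never be uniquely referenced. An illustrative sketch (the refcount reported for immortal objects is an implementation detail and varies by build):

import sys

a, b = 7, 3 + 4
print(a is b)               # True: both names bind the same preallocated small int
print(sys.getrefcount(7))   # a very large sentinel value when small ints are immortal
print(sys.getrefcount(()))  # likewise for the empty tuple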
%3d RESUME_CHECK 0
%3d BUILD_LIST 0
- LOAD_CONST_MORTAL 2 ((1, 2, 3))
+ LOAD_CONST 2 ((1, 2, 3))
LIST_EXTEND 1
LOAD_SMALL_INT 3
BINARY_OP 5 (*)
%3d L2: END_FOR
POP_ITER
- LOAD_CONST_IMMORTAL 1 (None)
+ LOAD_CONST 1 (None)
RETURN_VALUE
""" % (loop_test.__code__.co_firstlineno,
loop_test.__code__.co_firstlineno + 1,
load_attr_quicken = """\
0 RESUME_CHECK 0
- 1 LOAD_CONST_IMMORTAL 0 ('a')
+ 1 LOAD_CONST 0 ('a')
LOAD_ATTR_SLOT 0 (__class__)
RETURN_VALUE
"""
self.assert_specialized(compare_op_str, "COMPARE_OP_STR")
self.assert_no_opcode(compare_op_str, "COMPARE_OP")
- @cpython_only
- @requires_specialization_ft
- def test_load_const(self):
- def load_const():
- def unused(): pass
- # Currently, the empty tuple is immortal, and the otherwise
- # unused nested function's code object is mortal. This test will
- # have to use different values if either of that changes.
- return ()
-
- load_const()
- self.assert_specialized(load_const, "LOAD_CONST_IMMORTAL")
- self.assert_specialized(load_const, "LOAD_CONST_MORTAL")
- self.assert_no_opcode(load_const, "LOAD_CONST")
@cpython_only
@requires_specialization_ft
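With the LOAD_CONST family dropped from the specialization map and the matching test removed above, the generated opcode metadata should no longer mention the specialized forms. An illustrative check, assuming the private _specializations and _specialized_opmap dicts in the generated _opcode_metadata module (those names are not shown in the excerpt above):

import _opcode_metadata as m

print("LOAD_CONST" in m._specializations)              # expected: False on this revision
print("LOAD_CONST_IMMORTAL" in m._specialized_opmap)   # expected: False
print("LOAD_CONST_MORTAL" in m._specialized_opmap)     # expected: False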
value2 = PyStackRef_Borrow(GETLOCAL(oparg2));
}
- family(LOAD_CONST, 0) = {
- LOAD_CONST_MORTAL,
- LOAD_CONST_IMMORTAL,
- };
-
inst(LOAD_CONST, (-- value)) {
- /* We can't do this in the bytecode compiler as
- * marshalling can intern strings and make them immortal. */
- PyObject *obj = GETITEM(FRAME_CO_CONSTS, oparg);
- value = PyStackRef_FromPyObjectNew(obj);
-#if ENABLE_SPECIALIZATION_FT
-#ifdef Py_GIL_DISABLED
- uint8_t expected = LOAD_CONST;
- if (!_Py_atomic_compare_exchange_uint8(
- &this_instr->op.code, &expected,
- _Py_IsImmortal(obj) ? LOAD_CONST_IMMORTAL : LOAD_CONST_MORTAL)) {
- // We might lose a race with instrumentation, which we don't care about.
- assert(expected >= MIN_INSTRUMENTED_OPCODE);
- }
-#else
- if (this_instr->op.code == LOAD_CONST) {
- this_instr->op.code = _Py_IsImmortal(obj) ? LOAD_CONST_IMMORTAL : LOAD_CONST_MORTAL;
- }
-#endif
-#endif
- }
-
- inst(LOAD_CONST_MORTAL, (-- value)) {
- PyObject *obj = GETITEM(FRAME_CO_CONSTS, oparg);
- value = PyStackRef_FromPyObjectNewMortal(obj);
- }
-
- inst(LOAD_CONST_IMMORTAL, (-- value)) {
PyObject *obj = GETITEM(FRAME_CO_CONSTS, oparg);
- assert(_Py_IsImmortal(obj));
- value = PyStackRef_FromPyObjectImmortal(obj);
+ value = PyStackRef_FromPyObjectBorrow(obj);
}
replicate(4) inst(LOAD_SMALL_INT, (-- value)) {
assert(oparg < _PY_NSMALLPOSINTS);
PyObject *obj = (PyObject *)&_PyLong_SMALL_INTS[_PY_NSMALLNEGINTS + oparg];
- value = PyStackRef_FromPyObjectImmortal(obj);
+ value = PyStackRef_FromPyObjectBorrow(obj);
}
replicate(8) inst(STORE_FAST, (value --)) {
- assert(
- ((_PyFrame_GetCode(frame)->co_flags & (CO_COROUTINE | CO_GENERATOR)) == 0) ||
- PyStackRef_IsHeapSafe(value)
- );
_PyStackRef tmp = GETLOCAL(oparg);
GETLOCAL(oparg) = value;
DEAD(value);
};
inst(STORE_FAST_LOAD_FAST, (value1 -- value2)) {
- assert(
- ((_PyFrame_GetCode(frame)->co_flags & (CO_COROUTINE | CO_GENERATOR)) == 0) ||
- PyStackRef_IsHeapSafe(value1)
- );
uint32_t oparg1 = oparg >> 4;
uint32_t oparg2 = oparg & 15;
_PyStackRef tmp = GETLOCAL(oparg1);
}
inst(STORE_FAST_STORE_FAST, (value2, value1 --)) {
- assert(
- ((_PyFrame_GetCode(frame)->co_flags & (CO_COROUTINE | CO_GENERATOR)) == 0) ||
- PyStackRef_IsHeapSafe(value1)
- );
- assert(
- ((_PyFrame_GetCode(frame)->co_flags & (CO_COROUTINE | CO_GENERATOR)) == 0) ||
- PyStackRef_IsHeapSafe(value2)
- );
uint32_t oparg1 = oparg >> 4;
uint32_t oparg2 = oparg & 15;
_PyStackRef tmp = GETLOCAL(oparg1);
PyStackRef_CLOSE_SPECIALIZED(sub_st, _PyLong_ExactDealloc);
DEAD(sub_st);
PyStackRef_CLOSE(str_st);
- res = PyStackRef_FromPyObjectImmortal(res_o);
+ res = PyStackRef_FromPyObjectBorrow(res_o);
}
op(_GUARD_NOS_TUPLE, (nos, unused -- nos, unused)) {
}
tier2 pure op(_LOAD_CONST_INLINE_BORROW, (ptr/4 -- value)) {
- value = PyStackRef_FromPyObjectImmortal(ptr);
+ value = PyStackRef_FromPyObjectBorrow(ptr);
}
tier2 pure op (_POP_TOP_LOAD_CONST_INLINE_BORROW, (ptr/4, pop -- value)) {
PyStackRef_CLOSE(pop);
- value = PyStackRef_FromPyObjectImmortal(ptr);
+ value = PyStackRef_FromPyObjectBorrow(ptr);
}
tier2 pure op(_POP_TWO_LOAD_CONST_INLINE_BORROW, (ptr/4, pop1, pop2 -- value)) {
PyStackRef_CLOSE(pop2);
PyStackRef_CLOSE(pop1);
- value = PyStackRef_FromPyObjectImmortal(ptr);
+ value = PyStackRef_FromPyObjectBorrow(ptr);
}
tier2 pure op(_POP_CALL_TWO_LOAD_CONST_INLINE_BORROW, (ptr/4, callable, null, pop1, pop2 -- value)) {
(void)null; // Silence compiler warnings about unused variables
DEAD(null);
PyStackRef_CLOSE(callable);
- value = PyStackRef_FromPyObjectImmortal(ptr);
+ value = PyStackRef_FromPyObjectBorrow(ptr);
}
tier2 op(_CHECK_FUNCTION, (func_version/2 -- )) {
break;
}
- /* _LOAD_CONST is not a viable micro-op for tier 2 because it uses the 'this_instr' variable */
-
- case _LOAD_CONST_MORTAL: {
- _PyStackRef value;
- oparg = CURRENT_OPARG();
- PyObject *obj = GETITEM(FRAME_CO_CONSTS, oparg);
- value = PyStackRef_FromPyObjectNewMortal(obj);
- stack_pointer[0] = value;
- stack_pointer += 1;
- assert(WITHIN_STACK_BOUNDS());
- break;
- }
-
- case _LOAD_CONST_IMMORTAL: {
+ case _LOAD_CONST: {
_PyStackRef value;
oparg = CURRENT_OPARG();
PyObject *obj = GETITEM(FRAME_CO_CONSTS, oparg);
- assert(_Py_IsImmortal(obj));
- value = PyStackRef_FromPyObjectImmortal(obj);
+ value = PyStackRef_FromPyObjectBorrow(obj);
stack_pointer[0] = value;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
assert(oparg == CURRENT_OPARG());
assert(oparg < _PY_NSMALLPOSINTS);
PyObject *obj = (PyObject *)&_PyLong_SMALL_INTS[_PY_NSMALLNEGINTS + oparg];
- value = PyStackRef_FromPyObjectImmortal(obj);
+ value = PyStackRef_FromPyObjectBorrow(obj);
stack_pointer[0] = value;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
assert(oparg == CURRENT_OPARG());
assert(oparg < _PY_NSMALLPOSINTS);
PyObject *obj = (PyObject *)&_PyLong_SMALL_INTS[_PY_NSMALLNEGINTS + oparg];
- value = PyStackRef_FromPyObjectImmortal(obj);
+ value = PyStackRef_FromPyObjectBorrow(obj);
stack_pointer[0] = value;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
assert(oparg == CURRENT_OPARG());
assert(oparg < _PY_NSMALLPOSINTS);
PyObject *obj = (PyObject *)&_PyLong_SMALL_INTS[_PY_NSMALLNEGINTS + oparg];
- value = PyStackRef_FromPyObjectImmortal(obj);
+ value = PyStackRef_FromPyObjectBorrow(obj);
stack_pointer[0] = value;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
assert(oparg == CURRENT_OPARG());
assert(oparg < _PY_NSMALLPOSINTS);
PyObject *obj = (PyObject *)&_PyLong_SMALL_INTS[_PY_NSMALLNEGINTS + oparg];
- value = PyStackRef_FromPyObjectImmortal(obj);
+ value = PyStackRef_FromPyObjectBorrow(obj);
stack_pointer[0] = value;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
oparg = CURRENT_OPARG();
assert(oparg < _PY_NSMALLPOSINTS);
PyObject *obj = (PyObject *)&_PyLong_SMALL_INTS[_PY_NSMALLNEGINTS + oparg];
- value = PyStackRef_FromPyObjectImmortal(obj);
+ value = PyStackRef_FromPyObjectBorrow(obj);
stack_pointer[0] = value;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
oparg = 0;
assert(oparg == CURRENT_OPARG());
value = stack_pointer[-1];
- assert(
- ((_PyFrame_GetCode(frame)->co_flags & (CO_COROUTINE | CO_GENERATOR)) == 0) ||
- PyStackRef_IsHeapSafe(value)
- );
_PyStackRef tmp = GETLOCAL(oparg);
GETLOCAL(oparg) = value;
stack_pointer += -1;
oparg = 1;
assert(oparg == CURRENT_OPARG());
value = stack_pointer[-1];
- assert(
- ((_PyFrame_GetCode(frame)->co_flags & (CO_COROUTINE | CO_GENERATOR)) == 0) ||
- PyStackRef_IsHeapSafe(value)
- );
_PyStackRef tmp = GETLOCAL(oparg);
GETLOCAL(oparg) = value;
stack_pointer += -1;
oparg = 2;
assert(oparg == CURRENT_OPARG());
value = stack_pointer[-1];
- assert(
- ((_PyFrame_GetCode(frame)->co_flags & (CO_COROUTINE | CO_GENERATOR)) == 0) ||
- PyStackRef_IsHeapSafe(value)
- );
_PyStackRef tmp = GETLOCAL(oparg);
GETLOCAL(oparg) = value;
stack_pointer += -1;
oparg = 3;
assert(oparg == CURRENT_OPARG());
value = stack_pointer[-1];
- assert(
- ((_PyFrame_GetCode(frame)->co_flags & (CO_COROUTINE | CO_GENERATOR)) == 0) ||
- PyStackRef_IsHeapSafe(value)
- );
_PyStackRef tmp = GETLOCAL(oparg);
GETLOCAL(oparg) = value;
stack_pointer += -1;
oparg = 4;
assert(oparg == CURRENT_OPARG());
value = stack_pointer[-1];
- assert(
- ((_PyFrame_GetCode(frame)->co_flags & (CO_COROUTINE | CO_GENERATOR)) == 0) ||
- PyStackRef_IsHeapSafe(value)
- );
_PyStackRef tmp = GETLOCAL(oparg);
GETLOCAL(oparg) = value;
stack_pointer += -1;
oparg = 5;
assert(oparg == CURRENT_OPARG());
value = stack_pointer[-1];
- assert(
- ((_PyFrame_GetCode(frame)->co_flags & (CO_COROUTINE | CO_GENERATOR)) == 0) ||
- PyStackRef_IsHeapSafe(value)
- );
_PyStackRef tmp = GETLOCAL(oparg);
GETLOCAL(oparg) = value;
stack_pointer += -1;
oparg = 6;
assert(oparg == CURRENT_OPARG());
value = stack_pointer[-1];
- assert(
- ((_PyFrame_GetCode(frame)->co_flags & (CO_COROUTINE | CO_GENERATOR)) == 0) ||
- PyStackRef_IsHeapSafe(value)
- );
_PyStackRef tmp = GETLOCAL(oparg);
GETLOCAL(oparg) = value;
stack_pointer += -1;
oparg = 7;
assert(oparg == CURRENT_OPARG());
value = stack_pointer[-1];
- assert(
- ((_PyFrame_GetCode(frame)->co_flags & (CO_COROUTINE | CO_GENERATOR)) == 0) ||
- PyStackRef_IsHeapSafe(value)
- );
_PyStackRef tmp = GETLOCAL(oparg);
GETLOCAL(oparg) = value;
stack_pointer += -1;
_PyStackRef value;
oparg = CURRENT_OPARG();
value = stack_pointer[-1];
- assert(
- ((_PyFrame_GetCode(frame)->co_flags & (CO_COROUTINE | CO_GENERATOR)) == 0) ||
- PyStackRef_IsHeapSafe(value)
- );
_PyStackRef tmp = GETLOCAL(oparg);
GETLOCAL(oparg) = value;
stack_pointer += -1;
_PyFrame_SetStackPointer(frame, stack_pointer);
PyStackRef_CLOSE(str_st);
stack_pointer = _PyFrame_GetStackPointer(frame);
- res = PyStackRef_FromPyObjectImmortal(res_o);
+ res = PyStackRef_FromPyObjectBorrow(res_o);
stack_pointer[0] = res;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
case _LOAD_CONST_INLINE_BORROW: {
_PyStackRef value;
PyObject *ptr = (PyObject *)CURRENT_OPERAND0();
- value = PyStackRef_FromPyObjectImmortal(ptr);
+ value = PyStackRef_FromPyObjectBorrow(ptr);
stack_pointer[0] = value;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
_PyFrame_SetStackPointer(frame, stack_pointer);
PyStackRef_CLOSE(pop);
stack_pointer = _PyFrame_GetStackPointer(frame);
- value = PyStackRef_FromPyObjectImmortal(ptr);
+ value = PyStackRef_FromPyObjectBorrow(ptr);
stack_pointer[0] = value;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
_PyFrame_SetStackPointer(frame, stack_pointer);
PyStackRef_CLOSE(pop1);
stack_pointer = _PyFrame_GetStackPointer(frame);
- value = PyStackRef_FromPyObjectImmortal(ptr);
+ value = PyStackRef_FromPyObjectBorrow(ptr);
stack_pointer[0] = value;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
_PyFrame_SetStackPointer(frame, stack_pointer);
PyStackRef_CLOSE(callable);
stack_pointer = _PyFrame_GetStackPointer(frame);
- value = PyStackRef_FromPyObjectImmortal(ptr);
+ value = PyStackRef_FromPyObjectBorrow(ptr);
stack_pointer[0] = value;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
_PyFrame_SetStackPointer(frame, stack_pointer);
PyStackRef_CLOSE(str_st);
stack_pointer = _PyFrame_GetStackPointer(frame);
- res = PyStackRef_FromPyObjectImmortal(res_o);
+ res = PyStackRef_FromPyObjectBorrow(res_o);
}
stack_pointer[0] = res;
stack_pointer += 1;
frame->instr_ptr = next_instr;
next_instr += 1;
INSTRUCTION_STATS(LOAD_CONST);
- PREDICTED_LOAD_CONST:;
- _Py_CODEUNIT* const this_instr = next_instr - 1;
- (void)this_instr;
- _PyStackRef value;
- PyObject *obj = GETITEM(FRAME_CO_CONSTS, oparg);
- value = PyStackRef_FromPyObjectNew(obj);
- #if ENABLE_SPECIALIZATION_FT
- #ifdef Py_GIL_DISABLED
- uint8_t expected = LOAD_CONST;
- if (!_Py_atomic_compare_exchange_uint8(
- &this_instr->op.code, &expected,
- _Py_IsImmortal(obj) ? LOAD_CONST_IMMORTAL : LOAD_CONST_MORTAL)) {
- assert(expected >= MIN_INSTRUMENTED_OPCODE);
- }
- #else
- if (this_instr->op.code == LOAD_CONST) {
- this_instr->op.code = _Py_IsImmortal(obj) ? LOAD_CONST_IMMORTAL : LOAD_CONST_MORTAL;
- }
- #endif
- #endif
- stack_pointer[0] = value;
- stack_pointer += 1;
- assert(WITHIN_STACK_BOUNDS());
- DISPATCH();
- }
-
- TARGET(LOAD_CONST_IMMORTAL) {
- #if Py_TAIL_CALL_INTERP
- int opcode = LOAD_CONST_IMMORTAL;
- (void)(opcode);
- #endif
- frame->instr_ptr = next_instr;
- next_instr += 1;
- INSTRUCTION_STATS(LOAD_CONST_IMMORTAL);
- static_assert(0 == 0, "incorrect cache size");
- _PyStackRef value;
- PyObject *obj = GETITEM(FRAME_CO_CONSTS, oparg);
- assert(_Py_IsImmortal(obj));
- value = PyStackRef_FromPyObjectImmortal(obj);
- stack_pointer[0] = value;
- stack_pointer += 1;
- assert(WITHIN_STACK_BOUNDS());
- DISPATCH();
- }
-
- TARGET(LOAD_CONST_MORTAL) {
- #if Py_TAIL_CALL_INTERP
- int opcode = LOAD_CONST_MORTAL;
- (void)(opcode);
- #endif
- frame->instr_ptr = next_instr;
- next_instr += 1;
- INSTRUCTION_STATS(LOAD_CONST_MORTAL);
- static_assert(0 == 0, "incorrect cache size");
_PyStackRef value;
PyObject *obj = GETITEM(FRAME_CO_CONSTS, oparg);
- value = PyStackRef_FromPyObjectNewMortal(obj);
+ value = PyStackRef_FromPyObjectBorrow(obj);
stack_pointer[0] = value;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
_PyStackRef value;
assert(oparg < _PY_NSMALLPOSINTS);
PyObject *obj = (PyObject *)&_PyLong_SMALL_INTS[_PY_NSMALLNEGINTS + oparg];
- value = PyStackRef_FromPyObjectImmortal(obj);
+ value = PyStackRef_FromPyObjectBorrow(obj);
stack_pointer[0] = value;
stack_pointer += 1;
assert(WITHIN_STACK_BOUNDS());
INSTRUCTION_STATS(STORE_FAST);
_PyStackRef value;
value = stack_pointer[-1];
- assert(
- ((_PyFrame_GetCode(frame)->co_flags & (CO_COROUTINE | CO_GENERATOR)) == 0) ||
- PyStackRef_IsHeapSafe(value)
- );
_PyStackRef tmp = GETLOCAL(oparg);
GETLOCAL(oparg) = value;
stack_pointer += -1;
_PyStackRef value1;
_PyStackRef value2;
value1 = stack_pointer[-1];
- assert(
- ((_PyFrame_GetCode(frame)->co_flags & (CO_COROUTINE | CO_GENERATOR)) == 0) ||
- PyStackRef_IsHeapSafe(value1)
- );
uint32_t oparg1 = oparg >> 4;
uint32_t oparg2 = oparg & 15;
_PyStackRef tmp = GETLOCAL(oparg1);
_PyStackRef value1;
value1 = stack_pointer[-1];
value2 = stack_pointer[-2];
- assert(
- ((_PyFrame_GetCode(frame)->co_flags & (CO_COROUTINE | CO_GENERATOR)) == 0) ||
- PyStackRef_IsHeapSafe(value1)
- );
- assert(
- ((_PyFrame_GetCode(frame)->co_flags & (CO_COROUTINE | CO_GENERATOR)) == 0) ||
- PyStackRef_IsHeapSafe(value2)
- );
uint32_t oparg1 = oparg >> 4;
uint32_t oparg2 = oparg & 15;
_PyStackRef tmp = GETLOCAL(oparg1);
&&TARGET_LOAD_ATTR_PROPERTY,
&&TARGET_LOAD_ATTR_SLOT,
&&TARGET_LOAD_ATTR_WITH_HINT,
- &&TARGET_LOAD_CONST_IMMORTAL,
- &&TARGET_LOAD_CONST_MORTAL,
&&TARGET_LOAD_GLOBAL_BUILTIN,
&&TARGET_LOAD_GLOBAL_MODULE,
&&TARGET_LOAD_SUPER_ATTR_ATTR,
&&_unknown_opcode,
&&_unknown_opcode,
&&_unknown_opcode,
+ &&_unknown_opcode,
+ &&_unknown_opcode,
&&TARGET_INSTRUMENTED_END_FOR,
&&TARGET_INSTRUMENTED_POP_ITER,
&&TARGET_INSTRUMENTED_END_SEND,
Py_PRESERVE_NONE_CC static PyObject *_TAIL_CALL_LOAD_BUILD_CLASS(TAIL_CALL_PARAMS);
Py_PRESERVE_NONE_CC static PyObject *_TAIL_CALL_LOAD_COMMON_CONSTANT(TAIL_CALL_PARAMS);
Py_PRESERVE_NONE_CC static PyObject *_TAIL_CALL_LOAD_CONST(TAIL_CALL_PARAMS);
-Py_PRESERVE_NONE_CC static PyObject *_TAIL_CALL_LOAD_CONST_IMMORTAL(TAIL_CALL_PARAMS);
-Py_PRESERVE_NONE_CC static PyObject *_TAIL_CALL_LOAD_CONST_MORTAL(TAIL_CALL_PARAMS);
Py_PRESERVE_NONE_CC static PyObject *_TAIL_CALL_LOAD_DEREF(TAIL_CALL_PARAMS);
Py_PRESERVE_NONE_CC static PyObject *_TAIL_CALL_LOAD_FAST(TAIL_CALL_PARAMS);
Py_PRESERVE_NONE_CC static PyObject *_TAIL_CALL_LOAD_FAST_AND_CLEAR(TAIL_CALL_PARAMS);
[LOAD_BUILD_CLASS] = _TAIL_CALL_LOAD_BUILD_CLASS,
[LOAD_COMMON_CONSTANT] = _TAIL_CALL_LOAD_COMMON_CONSTANT,
[LOAD_CONST] = _TAIL_CALL_LOAD_CONST,
- [LOAD_CONST_IMMORTAL] = _TAIL_CALL_LOAD_CONST_IMMORTAL,
- [LOAD_CONST_MORTAL] = _TAIL_CALL_LOAD_CONST_MORTAL,
[LOAD_DEREF] = _TAIL_CALL_LOAD_DEREF,
[LOAD_FAST] = _TAIL_CALL_LOAD_FAST,
[LOAD_FAST_AND_CLEAR] = _TAIL_CALL_LOAD_FAST_AND_CLEAR,
[125] = _TAIL_CALL_UNKNOWN_OPCODE,
[126] = _TAIL_CALL_UNKNOWN_OPCODE,
[127] = _TAIL_CALL_UNKNOWN_OPCODE,
+ [210] = _TAIL_CALL_UNKNOWN_OPCODE,
+ [211] = _TAIL_CALL_UNKNOWN_OPCODE,
[212] = _TAIL_CALL_UNKNOWN_OPCODE,
[213] = _TAIL_CALL_UNKNOWN_OPCODE,
[214] = _TAIL_CALL_UNKNOWN_OPCODE,
}
op(_LOAD_CONST, (-- value)) {
- PyObject *val = PyTuple_GET_ITEM(co->co_consts, this_instr->oparg);
- int opcode = _Py_IsImmortal(val) ? _LOAD_CONST_INLINE_BORROW : _LOAD_CONST_INLINE;
- REPLACE_OP(this_instr, opcode, 0, (uintptr_t)val);
- value = sym_new_const(ctx, val);
- }
-
- op(_LOAD_CONST_MORTAL, (-- value)) {
- PyObject *val = PyTuple_GET_ITEM(co->co_consts, this_instr->oparg);
- int opcode = _Py_IsImmortal(val) ? _LOAD_CONST_INLINE_BORROW : _LOAD_CONST_INLINE;
- REPLACE_OP(this_instr, opcode, 0, (uintptr_t)val);
- value = sym_new_const(ctx, val);
- }
-
- op(_LOAD_CONST_IMMORTAL, (-- value)) {
PyObject *val = PyTuple_GET_ITEM(co->co_consts, this_instr->oparg);
REPLACE_OP(this_instr, _LOAD_CONST_INLINE_BORROW, 0, (uintptr_t)val);
value = sym_new_const(ctx, val);
break;
}
- /* _LOAD_CONST is not a viable micro-op for tier 2 */
-
- case _LOAD_CONST_MORTAL: {
- JitOptSymbol *value;
- PyObject *val = PyTuple_GET_ITEM(co->co_consts, this_instr->oparg);
- int opcode = _Py_IsImmortal(val) ? _LOAD_CONST_INLINE_BORROW : _LOAD_CONST_INLINE;
- REPLACE_OP(this_instr, opcode, 0, (uintptr_t)val);
- value = sym_new_const(ctx, val);
- stack_pointer[0] = value;
- stack_pointer += 1;
- assert(WITHIN_STACK_BOUNDS());
- break;
- }
-
- case _LOAD_CONST_IMMORTAL: {
+ case _LOAD_CONST: {
JitOptSymbol *value;
PyObject *val = PyTuple_GET_ITEM(co->co_consts, this_instr->oparg);
REPLACE_OP(this_instr, _LOAD_CONST_INLINE_BORROW, 0, (uintptr_t)val);
"PyStackRef_CLOSE_SPECIALIZED",
"PyStackRef_DUP",
"PyStackRef_False",
- "PyStackRef_FromPyObjectImmortal",
+ "PyStackRef_FromPyObjectBorrow",
"PyStackRef_FromPyObjectNew",
"PyStackRef_FromPyObjectSteal",
"PyStackRef_IsExactly",