_PyObject_ASSERT_FROM(op, !_PyObject_GC_IS_TRACKED(op),
"object already tracked by the garbage collector",
filename, lineno, __func__);
-
+#ifdef Py_GIL_DISABLED
+ op->ob_gc_bits |= _PyGC_BITS_TRACKED;
+#else
PyGC_Head *gc = _Py_AS_GC(op);
_PyObject_ASSERT_FROM(op,
(gc->_gc_prev & _PyGC_PREV_MASK_COLLECTING) == 0,
"object is in generation which is garbage collected",
filename, lineno, __func__);
-#ifdef Py_GIL_DISABLED
- op->ob_gc_bits |= _PyGC_BITS_TRACKED;
-#else
PyInterpreterState *interp = _PyInterpreterState_GET();
PyGC_Head *generation0 = interp->gc.generation0;
PyGC_Head *last = (PyGC_Head*)(generation0->_gc_prev);
static inline size_t
_PyType_PreHeaderSize(PyTypeObject *tp)
{
- return _PyType_IS_GC(tp) * sizeof(PyGC_Head) +
- _PyType_HasFeature(tp, Py_TPFLAGS_PREHEADER) * 2 * sizeof(PyObject *);
+ return (
+#ifndef Py_GIL_DISABLED
+ _PyType_IS_GC(tp) * sizeof(PyGC_Head) +
+#endif
+ _PyType_HasFeature(tp, Py_TPFLAGS_PREHEADER) * 2 * sizeof(PyObject *)
+ );
}
void _PyObject_GC_Link(PyObject *op);
PyObject * _PyObject_GetInstanceAttribute(PyObject *obj, PyDictValues *values,
PyObject *name);
+#ifdef Py_GIL_DISABLED
+# define MANAGED_DICT_OFFSET (((Py_ssize_t)sizeof(PyObject *))*-1)
+# define MANAGED_WEAKREF_OFFSET (((Py_ssize_t)sizeof(PyObject *))*-2)
+#else
+# define MANAGED_DICT_OFFSET (((Py_ssize_t)sizeof(PyObject *))*-3)
+# define MANAGED_WEAKREF_OFFSET (((Py_ssize_t)sizeof(PyObject *))*-4)
+#endif
+
typedef union {
PyObject *dict;
/* Use a char* to generate a warning if directly assigning a PyDictValues */
_PyObject_DictOrValuesPointer(PyObject *obj)
{
assert(Py_TYPE(obj)->tp_flags & Py_TPFLAGS_MANAGED_DICT);
- return ((PyDictOrValues *)obj)-3;
+ return (PyDictOrValues *)((char *)obj + MANAGED_DICT_OFFSET);
}
static inline int
ptr->values = ((char *)values) - 1;
}
-#define MANAGED_WEAKREF_OFFSET (((Py_ssize_t)sizeof(PyObject *))*-4)
-
extern PyObject ** _PyObject_ComputedDictPointer(PyObject *);
extern void _PyObject_FreeInstanceAttributes(PyObject *obj);
extern int _PyObject_IsInstanceDictEmpty(PyObject *);
struct _PyMutex { uint8_t v; };
struct _object {
- // ob_tid stores the thread id (or zero). It is also used by the GC to
- // store linked lists and the computed "gc_refs" refcount.
+ // ob_tid stores the thread id (or zero). It is also used by the GC and the
+ // trashcan mechanism as a linked list pointer and by the GC to store the
+ // computed "gc_refs" refcount.
uintptr_t ob_tid;
uint16_t _padding;
struct _PyMutex ob_mutex; // per-object lock
self.longdigit = sys.int_info.sizeof_digit
import _testinternalcapi
self.gc_headsize = _testinternalcapi.SIZEOF_PYGC_HEAD
+ self.managed_pre_header_size = _testinternalcapi.SIZEOF_MANAGED_PRE_HEADER
check_sizeof = test.support.check_sizeof
def __sizeof__(self):
return int(self)
self.assertEqual(sys.getsizeof(OverflowSizeof(sys.maxsize)),
- sys.maxsize + self.gc_headsize*2)
+ sys.maxsize + self.gc_headsize + self.managed_pre_header_size)
with self.assertRaises(OverflowError):
sys.getsizeof(OverflowSizeof(sys.maxsize + 1))
with self.assertRaises(ValueError):
# type
# static type: PyTypeObject
fmt = 'P2nPI13Pl4Pn9Pn12PIPc'
- s = vsize('2P' + fmt)
+ s = vsize(fmt)
check(int, s)
# class
s = vsize(fmt + # PyTypeObject
--- /dev/null
+The free-threaded build no longer allocates space for the ``PyGC_Head``
+structure in objects that support cyclic garbage collection. Several other
+fields and data structures serve as replacements, including ``ob_gc_bits``,
+``ob_tid``, and mimalloc internal data structures.
return 1;
}
+ Py_ssize_t sizeof_gc_head = 0;
+#ifndef Py_GIL_DISABLED
+ sizeof_gc_head = sizeof(PyGC_Head);
+#endif
+
if (PyModule_Add(module, "SIZEOF_PYGC_HEAD",
- PyLong_FromSsize_t(sizeof(PyGC_Head))) < 0) {
+ PyLong_FromSsize_t(sizeof_gc_head)) < 0) {
+ return 1;
+ }
+
+ if (PyModule_Add(module, "SIZEOF_MANAGED_PRE_HEADER",
+ PyLong_FromSsize_t(2 * sizeof(PyObject*))) < 0) {
return 1;
}
_PyObject_ASSERT(op, _PyObject_IS_GC(op));
_PyObject_ASSERT(op, !_PyObject_GC_IS_TRACKED(op));
_PyObject_ASSERT(op, Py_REFCNT(op) == 0);
+#ifdef Py_GIL_DISABLED
+ _PyObject_ASSERT(op, op->ob_tid == 0);
+ op->ob_tid = (uintptr_t)trash->delete_later;
+#else
_PyGCHead_SET_PREV(_Py_AS_GC(op), (PyGC_Head*)trash->delete_later);
+#endif
trash->delete_later = op;
}
PyObject *op = trash->delete_later;
destructor dealloc = Py_TYPE(op)->tp_dealloc;
- trash->delete_later =
- (PyObject*) _PyGCHead_PREV(_Py_AS_GC(op));
+#ifdef Py_GIL_DISABLED
+ trash->delete_later = (PyObject*) op->ob_tid;
+ op->ob_tid = 0;
+#else
+ trash->delete_later = (PyObject*) _PyGCHead_PREV(_Py_AS_GC(op));
+#endif
/* Call the deallocator directly. This used to try to
* fool Py_DECREF into calling it indirectly, but
// Automatically choose the generation that needs collecting.
#define GENERATION_AUTO (-1)
-// A linked-list of objects using the `ob_tid` field as the next pointer.
+// A linked list of objects using the `ob_tid` field as the next pointer.
+// The linked list pointers are distinct from any real thread ids: the thread
+// ids returned by _Py_ThreadId() are themselves pointers to distinct
+// (per-thread) objects, so no thread can mistake its id for a list pointer.
struct worklist {
uintptr_t head;
};
struct visitor_args *arg)
{
// Offset of PyObject header from start of memory block.
- Py_ssize_t offset_base = sizeof(PyGC_Head);
+ Py_ssize_t offset_base = 0;
if (_PyMem_DebugEnabled()) {
// The debug allocator adds two words at the beginning of each block.
offset_base += 2 * sizeof(size_t);
Py_ssize_t refcount = Py_REFCNT(op);
_PyObject_ASSERT(op, refcount >= 0);
- // Add the actual refcount to ob_tid.
+ // We repurpose ob_tid to compute "gc_refs", the number of external
+ // references to the object (i.e., from outside the GC heaps). This means
+ // that ob_tid is no longer a valid thread id until it is restored by
+ // scan_heap_visitor(). Until then, we cannot use the standard reference
+ // counting functions or allow other threads to run Python code.
gc_maybe_init_refs(op);
+
+ // Add the actual refcount to ob_tid.
gc_add_refs(op, refcount);
// Subtract internal references from ob_tid. Objects with ob_tid > 0
if (mem == NULL) {
return _PyErr_NoMemory(tstate);
}
- ((PyObject **)mem)[0] = NULL;
- ((PyObject **)mem)[1] = NULL;
+ if (presize) {
+ ((PyObject **)mem)[0] = NULL;
+ ((PyObject **)mem)[1] = NULL;
+ }
PyObject *op = (PyObject *)(mem + presize);
_PyObject_GC_Link(op);
return op;
return (size_t)-1;
}
- return (size_t)size + _PyType_PreHeaderSize(Py_TYPE(o));
+ size_t presize = 0;
+ if (!Py_IS_TYPE(o, &PyType_Type) ||
+ PyType_HasFeature((PyTypeObject *)o, Py_TPFLAGS_HEAPTYPE))
+ {
+ /* Add the size of the pre-header if "o" is not a static type */
+ presize = _PyType_PreHeaderSize(Py_TYPE(o));
+ }
+
+ return (size_t)size + presize;
}
static PyObject *
def _sizeof_void_p():
return gdb.lookup_type('void').pointer().sizeof
+def _managed_dict_offset():
+ # See pycore_object.h
+ pyobj = gdb.lookup_type("PyObject")
+ if any(field.name == "ob_ref_local" for field in pyobj.fields()):
+ return -1 * _sizeof_void_p()
+ else:
+ return -3 * _sizeof_void_p()
+
Py_TPFLAGS_MANAGED_DICT = (1 << 4)
Py_TPFLAGS_HEAPTYPE = (1 << 9)
if dictoffset < 0:
if int_from_int(typeobj.field('tp_flags')) & Py_TPFLAGS_MANAGED_DICT:
assert dictoffset == -1
- dictoffset = -3 * _sizeof_void_p()
+ dictoffset = _managed_dict_offset()
else:
type_PyVarObject_ptr = gdb.lookup_type('PyVarObject').pointer()
tsize = int_from_int(self._gdbval.cast(type_PyVarObject_ptr)['ob_size'])
has_values = int_from_int(typeobj.field('tp_flags')) & Py_TPFLAGS_MANAGED_DICT
if not has_values:
return None
- charptrptr_t = _type_char_ptr().pointer()
- ptr = self._gdbval.cast(charptrptr_t) - 3
- char_ptr = ptr.dereference()
+ ptr = self._gdbval.cast(_type_char_ptr()) + _managed_dict_offset()
+ char_ptr = ptr.cast(_type_char_ptr().pointer()).dereference()
if (int(char_ptr) & 1) == 0:
return None
char_ptr += 1