// See also the Py_TPFLAGS_READY flag.
#define _PyType_IsReady(type) ((type)->tp_dict != NULL)
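+// Allocate a new object of the given type without tracking it by the GC.
+// The caller becomes responsible for GC tracking; the typical sequence
+// (a sketch -- see dict_new() and PyType_GenericAlloc() in this change) is:
+//
+//     PyObject *op = _PyType_AllocNoTrack(type, 0);
+//     if (op == NULL) {
+//         return NULL;
+//     }
+//     // ... initialize the object ...
+//     _PyObject_GC_TRACK(op);
+//
+// PyType_GenericAlloc() is the variant that tracks GC types immediately.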
+extern PyObject* _PyType_AllocNoTrack(PyTypeObject *type, Py_ssize_t nitems);
+
#ifdef __cplusplus
}
#endif
static PyObject *
dict_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
{
- PyObject *self;
- PyDictObject *d;
+ assert(type != NULL);
+ assert(type->tp_alloc != NULL);
+ // dict subclasses must implement the GC protocol
+ assert(_PyType_IS_GC(type));
- assert(type != NULL && type->tp_alloc != NULL);
- self = type->tp_alloc(type, 0);
- if (self == NULL)
+ PyObject *self = type->tp_alloc(type, 0);
+ if (self == NULL) {
return NULL;
- d = (PyDictObject *)self;
-
- /* The object has been implicitly tracked by tp_alloc */
- if (type == &PyDict_Type) {
- _PyObject_GC_UNTRACK(d);
}
+ PyDictObject *d = (PyDictObject *)self;
d->ma_used = 0;
d->ma_version_tag = DICT_NEXT_VERSION();
d->ma_keys = Py_EMPTY_KEYS;
d->ma_values = empty_values;
ASSERT_CONSISTENT(d);
+
+ if (type != &PyDict_Type) {
+ // Don't track again if the subclass's tp_alloc is PyType_GenericAlloc(),
+ // which has already tracked the object
+ if (!_PyObject_GC_IS_TRACKED(d)) {
+ _PyObject_GC_TRACK(d);
+ }
+ }
+ else {
+ // _PyType_AllocNoTrack() does not track the created object
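+ // (a freshly created empty dict cannot yet be part of a reference cycle,
+ //  so it is safe to leave it untracked here; the GC starts tracking it
+ //  later, once it holds objects that must be traversed)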
+ assert(!_PyObject_GC_IS_TRACKED(d));
+ }
return self;
}
0, /* tp_descr_set */
0, /* tp_dictoffset */
dict_init, /* tp_init */
- PyType_GenericAlloc, /* tp_alloc */
+ _PyType_AllocNoTrack, /* tp_alloc */
dict_new, /* tp_new */
PyObject_GC_Del, /* tp_free */
.tp_vectorcall = dict_vectorcall,
#endif
-static inline void
-tuple_gc_track(PyTupleObject *op)
-{
- _PyObject_GC_TRACK(op);
-}
-
-
/* Print summary info about the state of the optimized allocator */
void
_PyTuple_DebugMallocStats(FILE *out)
#endif
}
-/* Allocate an uninitialized tuple object. Before making it public following
+/* Allocate an uninitialized tuple object. Before making it public, the following
steps must be done:
- - initialize its items
- - call tuple_gc_track() on it
+
+ - Initialize its items.
+ - Call _PyObject_GC_TRACK() on it.
+
Because the empty tuple is always reused and it's already tracked by GC,
this function must not be called with size == 0 (unless from PyTuple_New()
which wraps this function).
for (Py_ssize_t i = 0; i < size; i++) {
op->ob_item[i] = NULL;
}
- tuple_gc_track(op);
+ _PyObject_GC_TRACK(op);
return (PyObject *) op;
}
items[i] = o;
}
va_end(vargs);
- tuple_gc_track(result);
+ _PyObject_GC_TRACK(result);
return (PyObject *)result;
}
Py_INCREF(item);
dst[i] = item;
}
- tuple_gc_track(tuple);
+ _PyObject_GC_TRACK(tuple);
return (PyObject *)tuple;
}
Py_INCREF(v);
dest[i] = v;
}
- tuple_gc_track(np);
+ _PyObject_GC_TRACK(np);
return (PyObject *)np;
}
p++;
}
}
- tuple_gc_track(np);
+ _PyObject_GC_TRACK(np);
return (PyObject *) np;
}
Py_ssize_t i, n;
assert(PyType_IsSubtype(type, &PyTuple_Type));
+ // tuple subclasses must implement the GC protocol
+ assert(_PyType_IS_GC(type));
+
tmp = tuple_new_impl(&PyTuple_Type, iterable);
if (tmp == NULL)
return NULL;
PyTuple_SET_ITEM(newobj, i, item);
}
Py_DECREF(tmp);
+
+ // Don't track again if the subclass's tp_alloc is PyType_GenericAlloc(),
+ // which has already tracked the object
+ if (!_PyObject_GC_IS_TRACKED(newobj)) {
+ _PyObject_GC_TRACK(newobj);
+ }
return newobj;
}
dest[i] = it;
}
- tuple_gc_track(result);
+ _PyObject_GC_TRACK(result);
return (PyObject *)result;
}
}
}
PyObject *
-PyType_GenericAlloc(PyTypeObject *type, Py_ssize_t nitems)
+_PyType_AllocNoTrack(PyTypeObject *type, Py_ssize_t nitems)
{
PyObject *obj;
const size_t size = _PyObject_VAR_SIZE(type, nitems+1);
else {
_PyObject_InitVar((PyVarObject *)obj, type, nitems);
}
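+ // GC tracking is intentionally left to the caller; PyType_GenericAlloc()
+ // below calls _PyObject_GC_TRACK() right after this function returns.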
+ return obj;
+}
+
+PyObject *
+PyType_GenericAlloc(PyTypeObject *type, Py_ssize_t nitems)
+{
+ PyObject *obj = _PyType_AllocNoTrack(type, nitems);
+ if (obj == NULL) {
+ return NULL;
+ }
if (_PyType_IS_GC(type)) {
_PyObject_GC_TRACK(obj);