_PyCriticalSection_End(&_cs); \
}
+// Variants of Py_BEGIN/END_CRITICAL_SECTION that tolerate op == NULL:
+// _cs_opt is zero-initialized, and _PyCriticalSection_XBegin locks only
+// when op is non-NULL, so the matching Py_XEND_CRITICAL_SECTION() unlocks
+// only if a lock was actually taken.
+# define Py_XBEGIN_CRITICAL_SECTION(op) \
+ { \
+ _PyCriticalSection _cs_opt = {0}; \
+ _PyCriticalSection_XBegin(&_cs_opt, _PyObject_CAST(op))
+
+// Closes the scope opened by Py_XBEGIN_CRITICAL_SECTION; no-op when no
+// lock was taken (op was NULL).
+# define Py_XEND_CRITICAL_SECTION() \
+ _PyCriticalSection_XEnd(&_cs_opt); \
+ }
+
# define Py_BEGIN_CRITICAL_SECTION2(a, b) \
{ \
_PyCriticalSection2 _cs2; \
// The critical section APIs are no-ops with the GIL.
# define Py_BEGIN_CRITICAL_SECTION(op)
# define Py_END_CRITICAL_SECTION()
+// The X (optional) variants are likewise no-ops in GIL builds.
+# define Py_XBEGIN_CRITICAL_SECTION(op)
+# define Py_XEND_CRITICAL_SECTION()
# define Py_BEGIN_CRITICAL_SECTION2(a, b)
# define Py_END_CRITICAL_SECTION2()
# define _Py_CRITICAL_SECTION_ASSERT_MUTEX_LOCKED(mutex)
}
}
+// Begin a critical section on `op`'s per-object mutex, or do nothing when
+// `op` is NULL.  In builds with the GIL (no Py_GIL_DISABLED) this is always
+// a no-op.  Pair with _PyCriticalSection_XEnd on the same struct, which must
+// have been zero-initialized by the caller (Py_XBEGIN_CRITICAL_SECTION does
+// this) so XEnd can tell whether a lock was taken.
+static inline void
+_PyCriticalSection_XBegin(_PyCriticalSection *c, PyObject *op)
+{
+#ifdef Py_GIL_DISABLED
+    if (op != NULL) {
+        // `op` is already a PyObject *; the _PyObject_CAST belongs in the
+        // macro wrapper, not here.
+        _PyCriticalSection_Begin(c, &op->ob_mutex);
+    }
+#endif
+}
+
// Removes the top-most critical section from the thread's stack of critical
// sections. If the new top-most critical section is inactive, then it is
// resumed.
_PyCriticalSection_Pop(c);
}
+// End a critical section begun with _PyCriticalSection_XBegin.  Safe to call
+// when no lock was taken (c->mutex is NULL, e.g. the object was NULL or this
+// is a GIL build); it is then a no-op.
+static inline void
+_PyCriticalSection_XEnd(_PyCriticalSection *c)
+{
+    if (c->mutex == NULL) {
+        return;
+    }
+    _PyCriticalSection_End(c);
+}
+
static inline void
_PyCriticalSection2_Begin(_PyCriticalSection2 *c, PyMutex *m1, PyMutex *m2)
{
Py_END_CRITICAL_SECTION2();
assert_nogil(!PyMutex_IsLocked(&d2->ob_mutex));
+ // Optional variant behaves the same if the object is non-NULL
+ Py_XBEGIN_CRITICAL_SECTION(d1);
+ assert_nogil(PyMutex_IsLocked(&d1->ob_mutex));
+ Py_XEND_CRITICAL_SECTION();
+
+ // No-op
+ Py_XBEGIN_CRITICAL_SECTION(NULL);
+ Py_XEND_CRITICAL_SECTION();
+
Py_DECREF(d2);
Py_DECREF(d1);
Py_RETURN_NONE;