about summary refs log tree commit diff stats homepage
path: root/Python
diff options
context:
space:
mode:
authorSam Gross <colesbury@gmail.com>2023-11-08 17:39:29 -0500
committerGitHub <noreply@github.com>2023-11-08 15:39:29 -0700
commit31c90d5838e8d6e4c47d98500a34810ccb33a6d4 (patch)
tree5be595b11ca17cf1f1bd5875a69a04b927f10dff /Python
parent0b718e6407da65b838576a2459d630824ca62155 (diff)
downloadcpython-31c90d5838e8d6e4c47d98500a34810ccb33a6d4.tar.gz
cpython-31c90d5838e8d6e4c47d98500a34810ccb33a6d4.zip
gh-111569: Implement Python critical section API (gh-111571)
Critical sections are helpers to replace the global interpreter lock with finer grained locking. They provide similar guarantees to the GIL and avoid the deadlock risk that plain locking involves. Critical sections are implicitly ended whenever the GIL would be released. They are resumed when the GIL would be acquired. Nested critical sections behave as if the sections were interleaved.
Diffstat (limited to 'Python')
-rw-r--r--Python/critical_section.c100
-rw-r--r--Python/pystate.c10
2 files changed, 110 insertions, 0 deletions
diff --git a/Python/critical_section.c b/Python/critical_section.c
new file mode 100644
index 00000000000..2214d80eeb2
--- /dev/null
+++ b/Python/critical_section.c
@@ -0,0 +1,100 @@
+#include "Python.h"
+
+#include "pycore_lock.h"
+#include "pycore_critical_section.h"
+
+// The low-order bits of tstate->critical_section are used as tag bits
+// (_Py_CRITICAL_SECTION_TWO_MUTEXES and _Py_CRITICAL_SECTION_INACTIVE, see
+// the masking below), so the structure must be aligned to at least 4 bytes
+// for those bits to be free in the pointer value.
+static_assert(_Alignof(_PyCriticalSection) >= 4,
+ "critical section must be aligned to at least 4 bytes");
+
+// Slow path for beginning a one-mutex critical section: push `c` onto the
+// calling thread's stack of active critical sections, then block until `m`
+// is acquired.
+void
+_PyCriticalSection_BeginSlow(_PyCriticalSection *c, PyMutex *m)
+{
+ PyThreadState *tstate = _PyThreadState_GET();
+ // Leave c->mutex NULL until the lock is actually held: the section is
+ // already linked into tstate->critical_section, and
+ // _PyCriticalSection_SuspendAll() only unlocks sections with a non-NULL
+ // mutex — so a mutex we are still blocked on is never unlocked.
+ // NOTE(review): presumably the thread can detach while blocked in
+ // _PyMutex_LockSlow() below — confirm against pycore_lock.h.
+ c->mutex = NULL;
+ // Link into the per-thread singly linked stack of sections; the previous
+ // head (with its tag bits) is saved in c->prev.
+ c->prev = (uintptr_t)tstate->critical_section;
+ tstate->critical_section = (uintptr_t)c;
+
+ _PyMutex_LockSlow(m);
+ // Lock is held; record the mutex so suspend/resume can release/re-acquire it.
+ c->mutex = m;
+}
+
+// Slow path for beginning a two-mutex critical section: push `c` onto the
+// thread's stack of active sections (tagged with
+// _Py_CRITICAL_SECTION_TWO_MUTEXES), then acquire both mutexes.
+// `is_m1_locked` is non-zero when the caller already holds m1.
+// NOTE(review): callers are presumably responsible for a consistent lock
+// ordering of m1/m2 to avoid deadlock — confirm in pycore_critical_section.h.
+void
+_PyCriticalSection2_BeginSlow(_PyCriticalSection2 *c, PyMutex *m1, PyMutex *m2,
+ int is_m1_locked)
+{
+ PyThreadState *tstate = _PyThreadState_GET();
+ // Both mutex fields stay NULL until the corresponding lock is held, so
+ // _PyCriticalSection_SuspendAll() (which checks for NULL) never unlocks a
+ // mutex this thread does not yet hold.
+ c->base.mutex = NULL;
+ c->mutex2 = NULL;
+ c->base.prev = tstate->critical_section;
+ // Tag the head pointer so suspend/resume know to treat this entry as a
+ // _PyCriticalSection2 with a second mutex.
+ tstate->critical_section = (uintptr_t)c | _Py_CRITICAL_SECTION_TWO_MUTEXES;
+
+ if (!is_m1_locked) {
+ PyMutex_Lock(m1);
+ }
+ PyMutex_Lock(m2);
+ c->base.mutex = m1;
+ c->mutex2 = m2;
+}
+
+// Strip the tag bits (_Py_CRITICAL_SECTION_MASK) from a
+// tstate->critical_section value, yielding the _PyCriticalSection pointer
+// it encodes.
+static _PyCriticalSection *
+untag_critical_section(uintptr_t tag)
+{
+ return (_PyCriticalSection *)(tag & ~_Py_CRITICAL_SECTION_MASK);
+}
+
+// Release all locks held by critical sections. This is called by
+// _PyThreadState_Detach.
+//
+// Each active section is marked inactive (_Py_CRITICAL_SECTION_INACTIVE bit
+// set in the link that points to it) but left on the per-thread stack, so
+// that _PyCriticalSection_Resume() can re-acquire the top-most section's
+// lock(s) when the thread re-attaches.
+void
+_PyCriticalSection_SuspendAll(PyThreadState *tstate)
+{
+ // Walk the stack via the link fields themselves (head pointer, then each
+ // section's `prev`), so the INACTIVE bit can be set in the link in place.
+ uintptr_t *tagptr = &tstate->critical_section;
+ while (_PyCriticalSection_IsActive(*tagptr)) {
+ _PyCriticalSection *c = untag_critical_section(*tagptr);
+
+ // c->mutex is NULL while the owning frame is still blocked in
+ // _PyCriticalSection_BeginSlow(); there is nothing to unlock then.
+ if (c->mutex) {
+ PyMutex_Unlock(c->mutex);
+ if ((*tagptr & _Py_CRITICAL_SECTION_TWO_MUTEXES)) {
+ _PyCriticalSection2 *c2 = (_PyCriticalSection2 *)c;
+ if (c2->mutex2) {
+ PyMutex_Unlock(c2->mutex2);
+ }
+ }
+ }
+
+ // Mark this section suspended and continue with the one below it.
+ *tagptr |= _Py_CRITICAL_SECTION_INACTIVE;
+ tagptr = &c->prev;
+ }
+}
+
+// Re-acquire the lock(s) of the top-most suspended critical section. This is
+// called by _PyThreadState_Attach. Only the head of the stack is resumed;
+// sections below it stay suspended until they become the head again.
+void
+_PyCriticalSection_Resume(PyThreadState *tstate)
+{
+ uintptr_t p = tstate->critical_section;
+ _PyCriticalSection *c = untag_critical_section(p);
+ // The head must have been suspended (INACTIVE bit set) by
+ // _PyCriticalSection_SuspendAll().
+ assert(!_PyCriticalSection_IsActive(p));
+
+ // Clear the mutex fields while re-acquiring: if this thread is suspended
+ // again while blocked in PyMutex_Lock() below,
+ // _PyCriticalSection_SuspendAll() must not unlock mutexes we do not hold.
+ PyMutex *m1 = c->mutex;
+ c->mutex = NULL;
+
+ PyMutex *m2 = NULL;
+ _PyCriticalSection2 *c2 = NULL;
+ if ((p & _Py_CRITICAL_SECTION_TWO_MUTEXES)) {
+ c2 = (_PyCriticalSection2 *)c;
+ m2 = c2->mutex2;
+ c2->mutex2 = NULL;
+ }
+
+ // m1/m2 may be NULL if the section never finished acquiring them in the
+ // Begin slow path (see _PyCriticalSection_SuspendAll).
+ if (m1) {
+ PyMutex_Lock(m1);
+ }
+ if (m2) {
+ PyMutex_Lock(m2);
+ }
+
+ // Locks held again; restore the fields so a later suspend can release them.
+ c->mutex = m1;
+ if (m2) {
+ c2->mutex2 = m2;
+ }
+
+ // Mark the head active again.
+ tstate->critical_section &= ~_Py_CRITICAL_SECTION_INACTIVE;
+}
diff --git a/Python/pystate.c b/Python/pystate.c
index b369a56d6d5..991d8d204a1 100644
--- a/Python/pystate.c
+++ b/Python/pystate.c
@@ -4,6 +4,7 @@
#include "Python.h"
#include "pycore_ceval.h"
#include "pycore_code.h" // stats
+#include "pycore_critical_section.h" // _PyCriticalSection_Resume()
#include "pycore_dtoa.h" // _dtoa_state_INIT()
#include "pycore_emscripten_trampoline.h" // _Py_EmscriptenTrampoline_Init()
#include "pycore_frame.h"
@@ -1911,6 +1912,12 @@ _PyThreadState_Attach(PyThreadState *tstate)
Py_FatalError("thread attach failed");
}
+ // Resume previous critical section. This acquires the lock(s) from the
+ // top-most critical section.
+ if (tstate->critical_section != 0) {
+ _PyCriticalSection_Resume(tstate);
+ }
+
#if defined(Py_DEBUG)
errno = err;
#endif
@@ -1922,6 +1929,9 @@ _PyThreadState_Detach(PyThreadState *tstate)
// XXX assert(tstate_is_alive(tstate) && tstate_is_bound(tstate));
assert(tstate->state == _Py_THREAD_ATTACHED);
assert(tstate == current_fast_get(&_PyRuntime));
+ if (tstate->critical_section != 0) {
+ _PyCriticalSection_SuspendAll(tstate);
+ }
tstate_set_detached(tstate);
tstate_deactivate(tstate);
current_fast_clear(&_PyRuntime);