
Commit 2566434

gh-109693: Update _gil_runtime_state.last_holder to use pyatomic.h (#110605)
1 parent: 4a53a39
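
In short: gil->last_holder changes from the legacy _Py_atomic_address (accessed through the untyped pycore_atomic.h helpers plus uintptr_t casts) to a plain PyThreadState* accessed with the typed pointer helpers from pyatomic.h. The before/after access pattern, as it appears in the hunks below (sketch only; the "holder" local is illustrative, not part of the diff):

    /* Before: untyped atomic word, casts at every use. */
    _Py_atomic_store_relaxed(&gil->last_holder, (uintptr_t)tstate);
    PyThreadState *holder =
        (PyThreadState *)_Py_atomic_load_relaxed(&gil->last_holder);

    /* After: typed pointer helpers with relaxed ordering. */
    _Py_atomic_store_ptr_relaxed(&gil->last_holder, tstate);
    holder = (PyThreadState *)_Py_atomic_load_ptr_relaxed(&gil->last_holder);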

2 files changed (+9, -9)

Include/internal/pycore_gil.h (+2, -2)

@@ -8,7 +8,7 @@ extern "C" {
 #  error "this header requires Py_BUILD_CORE define"
 #endif
 
-#include "pycore_atomic.h"    // _Py_atomic_address
+#include "pycore_atomic.h"    // _Py_atomic_int
 #include "pycore_condvar.h"   // PyCOND_T
 
 #ifndef Py_HAVE_CONDVAR
@@ -25,7 +25,7 @@ struct _gil_runtime_state {
     unsigned long interval;
     /* Last PyThreadState holding / having held the GIL. This helps us
        know whether anyone else was scheduled after we dropped the GIL. */
-    _Py_atomic_address last_holder;
+    PyThreadState* last_holder;
     /* Whether the GIL is already taken (-1 if uninitialized). This is
        atomic because it can be read without any lock taken in ceval.c. */
     _Py_atomic_int locked;
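
For readers new to the pyatomic.h helpers: _Py_atomic_store_ptr_relaxed / _Py_atomic_load_ptr_relaxed behave like relaxed-order atomic stores and loads of a pointer. A minimal, self-contained C11 sketch of the same pattern, using hypothetical gil_state / thread_state stand-ins rather than CPython's real types:

    #include <stdatomic.h>
    #include <stdio.h>

    /* Hypothetical stand-ins for PyThreadState and _gil_runtime_state. */
    typedef struct thread_state { int id; } thread_state;

    struct gil_state {
        _Atomic(thread_state *) last_holder;   /* last thread to hold the lock */
    };

    /* Roughly what _Py_atomic_store_ptr_relaxed(&gil->last_holder, tstate) does. */
    static void set_last_holder(struct gil_state *gil, thread_state *tstate)
    {
        atomic_store_explicit(&gil->last_holder, tstate, memory_order_relaxed);
    }

    /* Roughly what (PyThreadState*)_Py_atomic_load_ptr_relaxed(&gil->last_holder) does. */
    static thread_state *get_last_holder(struct gil_state *gil)
    {
        return atomic_load_explicit(&gil->last_holder, memory_order_relaxed);
    }

    int main(void)
    {
        struct gil_state gil;
        atomic_init(&gil.last_holder, NULL);

        thread_state t1 = { .id = 1 };
        set_last_holder(&gil, &t1);
        printf("last holder id: %d\n", get_last_holder(&gil)->id);
        return 0;
    }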

Python/ceval_gil.c (+7, -7)

@@ -189,7 +189,7 @@ static void create_gil(struct _gil_runtime_state *gil)
 #ifdef FORCE_SWITCHING
     COND_INIT(gil->switch_cond);
 #endif
-    _Py_atomic_store_relaxed(&gil->last_holder, 0);
+    _Py_atomic_store_ptr_relaxed(&gil->last_holder, 0);
     _Py_ANNOTATE_RWLOCK_CREATE(&gil->locked);
     _Py_atomic_store_explicit(&gil->locked, 0, _Py_memory_order_release);
 }
@@ -233,7 +233,7 @@ drop_gil(PyInterpreterState *interp, PyThreadState *tstate)
     // XXX assert(tstate == NULL || !tstate->_status.cleared);
 
     struct _gil_runtime_state *gil = ceval->gil;
-    if (!_Py_atomic_load_relaxed(&gil->locked)) {
+    if (!_Py_atomic_load_ptr_relaxed(&gil->locked)) {
         Py_FatalError("drop_gil: GIL is not locked");
     }
 
@@ -242,7 +242,7 @@ drop_gil(PyInterpreterState *interp, PyThreadState *tstate)
         /* Sub-interpreter support: threads might have been switched
            under our feet using PyThreadState_Swap(). Fix the GIL last
            holder variable so that our heuristics work. */
-        _Py_atomic_store_relaxed(&gil->last_holder, (uintptr_t)tstate);
+        _Py_atomic_store_ptr_relaxed(&gil->last_holder, tstate);
     }
 
     MUTEX_LOCK(gil->mutex);
@@ -263,7 +263,7 @@ drop_gil(PyInterpreterState *interp, PyThreadState *tstate)
     if (tstate != NULL && _Py_eval_breaker_bit_is_set(interp, _PY_GIL_DROP_REQUEST_BIT)) {
         MUTEX_LOCK(gil->switch_mutex);
         /* Not switched yet => wait */
-        if (((PyThreadState*)_Py_atomic_load_relaxed(&gil->last_holder)) == tstate)
+        if (((PyThreadState*)_Py_atomic_load_ptr_relaxed(&gil->last_holder)) == tstate)
         {
             assert(_PyThreadState_CheckConsistency(tstate));
             RESET_GIL_DROP_REQUEST(tstate->interp);
@@ -361,8 +361,8 @@ take_gil(PyThreadState *tstate)
     _Py_atomic_store_relaxed(&gil->locked, 1);
     _Py_ANNOTATE_RWLOCK_ACQUIRED(&gil->locked, /*is_write=*/1);
 
-    if (tstate != (PyThreadState*)_Py_atomic_load_relaxed(&gil->last_holder)) {
-        _Py_atomic_store_relaxed(&gil->last_holder, (uintptr_t)tstate);
+    if (tstate != (PyThreadState*)_Py_atomic_load_ptr_relaxed(&gil->last_holder)) {
+        _Py_atomic_store_ptr_relaxed(&gil->last_holder, tstate);
         ++gil->switch_number;
     }
 
@@ -434,7 +434,7 @@ PyEval_ThreadsInitialized(void)
 static inline int
 current_thread_holds_gil(struct _gil_runtime_state *gil, PyThreadState *tstate)
 {
-    if (((PyThreadState*)_Py_atomic_load_relaxed(&gil->last_holder)) != tstate) {
+    if (((PyThreadState*)_Py_atomic_load_ptr_relaxed(&gil->last_holder)) != tstate) {
         return 0;
     }
     return _Py_atomic_load_relaxed(&gil->locked);
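
Note that the loads in the hunks above still cast the result to PyThreadState*, since the pointer helper is type-generic. Continuing the hypothetical gil_state / thread_state sketch from the previous file (names are illustrative, not CPython's), the take_gil switch-counting heuristic that this commit leaves intact looks roughly like:

    /* If a different thread was the last holder, record the hand-off
       (mirrors the take_gil hunk above, using the sketch helpers). */
    static void note_gil_acquired(struct gil_state *gil, thread_state *tstate,
                                  unsigned long *switch_number)
    {
        if (get_last_holder(gil) != tstate) {
            set_last_holder(gil, tstate);
            ++*switch_number;
        }
    }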
