diff --git a/Include/internal/pycore_pystate.h b/Include/internal/pycore_pystate.h
index 864e0f5d1db289..d1bdf15ef7eed6 100644
--- a/Include/internal/pycore_pystate.h
+++ b/Include/internal/pycore_pystate.h
@@ -296,6 +296,9 @@ _Py_AssertHoldsTstateFunc(const char *func)
 #define _Py_AssertHoldsTstate()
 #endif
 
+void _PyThreadState_Decref(PyThreadState *tstate);
+void _PyThreadState_Incref(PyThreadState *tstate);
+
 #ifdef __cplusplus
 }
 #endif
diff --git a/Lib/test/test_threading.py b/Lib/test/test_threading.py
index fa666608263e27..036b12fb37ebcb 100644
--- a/Lib/test/test_threading.py
+++ b/Lib/test/test_threading.py
@@ -1039,6 +1039,26 @@ def checker():
         self.assertEqual(threading.gettrace(), old_trace)
         self.assertEqual(sys.gettrace(), old_trace)
 
+    @unittest.skipUnless(support.Py_GIL_DISABLED, "only meaningful under free-threading")
+    def test_settrace_all_threads_race(self):
+        # GH-132296: settrace_all_threads() could be racy on the free-threaded build
+        # if the threads were concurrently deleted.
+        old_trace = threading.gettrace()
+        def dummy(*args):
+            pass
+
+        def do_settrace():
+            threading.settrace_all_threads(dummy)
+
+        try:
+            with threading_helper.catch_threading_exception() as cm:
+                with threading_helper.start_threads((threading.Thread(target=do_settrace) for _ in range(8))):
+                    pass
+
+            self.assertIsNone(cm.exc_value)
+        finally:
+            threading.settrace_all_threads(old_trace)
+
     def test_getprofile(self):
         def fn(*args): pass
         old_profile = threading.getprofile()
diff --git a/Misc/NEWS.d/next/C_API/2025-04-08-22-08-05.gh-issue-132296.n0hZYY.rst b/Misc/NEWS.d/next/C_API/2025-04-08-22-08-05.gh-issue-132296.n0hZYY.rst
new file mode 100644
index 00000000000000..b7f1721fe6474a
--- /dev/null
+++ b/Misc/NEWS.d/next/C_API/2025-04-08-22-08-05.gh-issue-132296.n0hZYY.rst
@@ -0,0 +1,2 @@
+Fix a crash when using :c:func:`PyEval_SetTrace` on the :term:`free threaded
+<free threading>` build.
diff --git a/Python/ceval.c b/Python/ceval.c
index a59b2b7a16866d..02a99690c9a29c 100644
--- a/Python/ceval.c
+++ b/Python/ceval.c
@@ -2467,15 +2467,26 @@ PyEval_SetTraceAllThreads(Py_tracefunc func, PyObject *arg)
     _PyRuntimeState *runtime = &_PyRuntime;
     HEAD_LOCK(runtime);
     PyThreadState* ts = PyInterpreterState_ThreadHead(interp);
+    /* gh-132296: We need to prevent the thread state from being concurrently
+       deallocated. We can't stop-the-world because _PyEval_SetTrace()
+       is re-entrant. */
+    _PyThreadState_Incref(ts);
     HEAD_UNLOCK(runtime);
 
-    while (ts) {
+    while (ts != NULL) {
         if (_PyEval_SetTrace(ts, func, arg) < 0) {
             PyErr_FormatUnraisable("Exception ignored in PyEval_SetTraceAllThreads");
         }
         HEAD_LOCK(runtime);
+        PyThreadState *old = ts;
         ts = PyThreadState_Next(ts);
+        /* Drop the reference to the prior thread state
+           and acquire a reference to the next one. */
+        if (ts != NULL) {
+            _PyThreadState_Incref(ts);
+        }
         HEAD_UNLOCK(runtime);
+        _PyThreadState_Decref(old);
     }
 }
 
diff --git a/Python/pystate.c b/Python/pystate.c
index ee35f0fa945f8b..b15c3d0398ff90 100644
--- a/Python/pystate.c
+++ b/Python/pystate.c
@@ -1485,6 +1485,22 @@ decref_threadstate(_PyThreadStateImpl *tstate)
     }
 }
 
+void
+_PyThreadState_Decref(PyThreadState *tstate)
+{
+    assert(tstate != NULL);
+    _PyThreadStateImpl *impl = (_PyThreadStateImpl *)tstate;
+    decref_threadstate(impl);
+}
+
+void
+_PyThreadState_Incref(PyThreadState *tstate)
+{
+    assert(tstate != NULL);
+    _PyThreadStateImpl *impl = (_PyThreadStateImpl *)tstate;
+    _Py_atomic_add_ssize(&impl->refcount, 1);
+}
+
 /* Get the thread state to a minimal consistent state.
    Further init happens in pylifecycle.c before it can be used.
    All fields not initialized here are expected to be zeroed out,
@@ -1880,9 +1896,15 @@ void
 PyThreadState_Delete(PyThreadState *tstate)
 {
     _Py_EnsureTstateNotNULL(tstate);
-    tstate_verify_not_active(tstate);
-    tstate_delete_common(tstate, 0);
-    free_threadstate((_PyThreadStateImpl *)tstate);
+    _PyThreadStateImpl *impl = (_PyThreadStateImpl *)tstate;
+    if (_Py_atomic_add_ssize(&impl->refcount, -1) == 2) {
+        /* Treat the interpreter's reference (via the linked list) as weak,
+           even though it's technically strong. This is because we want
+           to free the thread state now, rather than wait for finalization. */
+        tstate_verify_not_active(tstate);
+        tstate_delete_common(tstate, 0);
+        free_threadstate((_PyThreadStateImpl *)tstate);
+    }
 }
 
 
@@ -1895,7 +1917,7 @@ _PyThreadState_DeleteCurrent(PyThreadState *tstate)
 #endif
     current_fast_clear(tstate->interp->runtime);
     tstate_delete_common(tstate, 1); // release GIL as part of call
-    free_threadstate((_PyThreadStateImpl *)tstate);
+    decref_threadstate((_PyThreadStateImpl *)tstate);
 }
 
 void
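
Note on the pattern used in the ceval.c hunk above: the loop now does a hand-over-hand, reference-counted walk of the interpreter's thread-state list. It pins the current thread state before HEAD_UNLOCK so it cannot be freed while _PyEval_SetTrace() runs without the lock, then on each step pins the next state and drops the previous one. The standalone sketch below illustrates the same traversal pattern with a generic mutex-protected list; it is not CPython code, and node, node_incref, node_decref, and visit_all are illustrative names only.

```c
/* Minimal, self-contained sketch of hand-over-hand refcounted traversal.
   Not CPython code: the node type and helpers are illustrative only. */
#include <pthread.h>
#include <stdatomic.h>
#include <stdio.h>
#include <stdlib.h>

typedef struct node {
    struct node *next;
    atomic_size_t refcount;   /* one reference is owned by the list itself */
    int id;
} node;

static pthread_mutex_t head_lock = PTHREAD_MUTEX_INITIALIZER;
static node *head;

static void node_incref(node *n) { atomic_fetch_add(&n->refcount, 1); }

static void node_decref(node *n)
{
    if (atomic_fetch_sub(&n->refcount, 1) == 1) {
        free(n);              /* last reference dropped: safe to free */
    }
}

static void visit_all(void (*visit)(node *))
{
    pthread_mutex_lock(&head_lock);
    node *n = head;
    if (n != NULL) {
        node_incref(n);       /* pin the first node before unlocking */
    }
    pthread_mutex_unlock(&head_lock);

    while (n != NULL) {
        visit(n);             /* runs without the list lock held */

        pthread_mutex_lock(&head_lock);
        node *old = n;
        n = n->next;
        if (n != NULL) {
            node_incref(n);   /* pin the next node before unlocking */
        }
        pthread_mutex_unlock(&head_lock);
        node_decref(old);     /* may free 'old' if it was unlinked meanwhile */
    }
}

static void print_node(node *n) { printf("visiting node %d\n", n->id); }

int main(void)
{
    for (int i = 0; i < 3; i++) {      /* build a small list: 2 -> 1 -> 0 */
        node *n = calloc(1, sizeof(*n));
        n->id = i;
        atomic_init(&n->refcount, 1);
        n->next = head;
        head = n;
    }
    visit_all(print_node);

    /* teardown: drop the list's own references */
    node *n = head;
    head = NULL;
    while (n != NULL) {
        node *next = n->next;
        node_decref(n);
        n = next;
    }
    return 0;
}
```

The sketch keeps the simple model of a single strong reference held by the list; the patch itself instead treats the interpreter's linked-list reference as weak in PyThreadState_Delete (per its own comment) so a thread state can still be freed promptly rather than waiting for finalization.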