@@ -32,7 +32,7 @@ extern "C" {
3232 const bool _should_lock_cs = PyList_CheckExact(_orig_seq); \
3333 PyCriticalSection _cs; \
3434 if (_should_lock_cs) { \
35- _PyCriticalSection_Begin (&_cs, _orig_seq); \
35+ PyCriticalSection_Begin (&_cs, _orig_seq); \
3636 }
3737
3838# define Py_END_CRITICAL_SECTION_SEQUENCE_FAST () \
@@ -77,10 +77,10 @@ _PyCriticalSection_Resume(PyThreadState *tstate);
7777
7878// (private) slow path for locking the mutex
7979PyAPI_FUNC (void )
80- _PyCriticalSection_BeginSlow (PyCriticalSection * c , PyMutex * m );
80+ _PyCriticalSection_BeginSlow (PyThreadState * tstate , PyCriticalSection * c , PyMutex * m );
8181
8282PyAPI_FUNC (void )
83- _PyCriticalSection2_BeginSlow (PyCriticalSection2 * c , PyMutex * m1 , PyMutex * m2 ,
83+ _PyCriticalSection2_BeginSlow (PyThreadState * tstate , PyCriticalSection2 * c , PyMutex * m1 , PyMutex * m2 ,
8484 int is_m1_locked );
8585
8686PyAPI_FUNC (void )
@@ -95,34 +95,32 @@ _PyCriticalSection_IsActive(uintptr_t tag)
9595}
9696
9797static inline void
98- _PyCriticalSection_BeginMutex (PyCriticalSection * c , PyMutex * m )
98+ _PyCriticalSection_BeginMutex (PyThreadState * tstate , PyCriticalSection * c , PyMutex * m )
9999{
100100 if (PyMutex_LockFast (m )) {
101- PyThreadState * tstate = _PyThreadState_GET ();
102101 c -> _cs_mutex = m ;
103102 c -> _cs_prev = tstate -> critical_section ;
104103 tstate -> critical_section = (uintptr_t )c ;
105104 }
106105 else {
107- _PyCriticalSection_BeginSlow (c , m );
106+ _PyCriticalSection_BeginSlow (tstate , c , m );
108107 }
109108}
110- #define PyCriticalSection_BeginMutex _PyCriticalSection_BeginMutex
109+ #define PyCriticalSection_BeginMutex (...) _PyCriticalSection_BeginMutex(_PyThreadState_GET(), __VA_ARGS__)
111110
112111static inline void
113- _PyCriticalSection_Begin (PyCriticalSection * c , PyObject * op )
112+ _PyCriticalSection_Begin (PyThreadState * tstate , PyCriticalSection * c , PyObject * op )
114113{
115- _PyCriticalSection_BeginMutex (c , & op -> ob_mutex );
114+ _PyCriticalSection_BeginMutex (tstate , c , & op -> ob_mutex );
116115}
117- #define PyCriticalSection_Begin _PyCriticalSection_Begin
116+ #define PyCriticalSection_Begin (...) _PyCriticalSection_Begin(_PyThreadState_GET(), __VA_ARGS__)
118117
119118// Removes the top-most critical section from the thread's stack of critical
120119// sections. If the new top-most critical section is inactive, then it is
121120// resumed.
122121static inline void
123- _PyCriticalSection_Pop (PyCriticalSection * c )
122+ _PyCriticalSection_Pop (PyThreadState * tstate , PyCriticalSection * c )
124123{
125- PyThreadState * tstate = _PyThreadState_GET ();
126124 uintptr_t prev = c -> _cs_prev ;
127125 tstate -> critical_section = prev ;
128126
@@ -132,7 +130,7 @@ _PyCriticalSection_Pop(PyCriticalSection *c)
132130}
133131
134132static inline void
135- _PyCriticalSection_End (PyCriticalSection * c )
133+ _PyCriticalSection_End (PyThreadState * tstate , PyCriticalSection * c )
136134{
137135 // If the mutex is NULL, we used the fast path in
138136 // _PyCriticalSection_BeginSlow for locks already held in the top-most
@@ -141,18 +139,18 @@ _PyCriticalSection_End(PyCriticalSection *c)
141139 return ;
142140 }
143141 PyMutex_Unlock (c -> _cs_mutex );
144- _PyCriticalSection_Pop (c );
142+ _PyCriticalSection_Pop (tstate , c );
145143}
146- #define PyCriticalSection_End _PyCriticalSection_End
144+ #define PyCriticalSection_End (...) _PyCriticalSection_End(_PyThreadState_GET(), __VA_ARGS__)
147145
148146static inline void
149- _PyCriticalSection2_BeginMutex (PyCriticalSection2 * c , PyMutex * m1 , PyMutex * m2 )
147+ _PyCriticalSection2_BeginMutex (PyThreadState * tstate , PyCriticalSection2 * c , PyMutex * m1 , PyMutex * m2 )
150148{
151149 if (m1 == m2 ) {
152150 // If the two mutex arguments are the same, treat this as a critical
153151 // section with a single mutex.
154152 c -> _cs_mutex2 = NULL ;
155- _PyCriticalSection_BeginMutex (& c -> _cs_base , m1 );
153+ _PyCriticalSection_BeginMutex (tstate , & c -> _cs_base , m1 );
156154 return ;
157155 }
158156
@@ -167,7 +165,6 @@ _PyCriticalSection2_BeginMutex(PyCriticalSection2 *c, PyMutex *m1, PyMutex *m2)
167165
168166 if (PyMutex_LockFast (m1 )) {
169167 if (PyMutex_LockFast (m2 )) {
170- PyThreadState * tstate = _PyThreadState_GET ();
171168 c -> _cs_base ._cs_mutex = m1 ;
172169 c -> _cs_mutex2 = m2 ;
173170 c -> _cs_base ._cs_prev = tstate -> critical_section ;
@@ -176,24 +173,24 @@ _PyCriticalSection2_BeginMutex(PyCriticalSection2 *c, PyMutex *m1, PyMutex *m2)
176173 tstate -> critical_section = p ;
177174 }
178175 else {
179- _PyCriticalSection2_BeginSlow (c , m1 , m2 , 1 );
176+ _PyCriticalSection2_BeginSlow (tstate , c , m1 , m2 , 1 );
180177 }
181178 }
182179 else {
183- _PyCriticalSection2_BeginSlow (c , m1 , m2 , 0 );
180+ _PyCriticalSection2_BeginSlow (tstate , c , m1 , m2 , 0 );
184181 }
185182}
186- #define PyCriticalSection2_BeginMutex _PyCriticalSection2_BeginMutex
183+ #define PyCriticalSection2_BeginMutex (...) _PyCriticalSection2_BeginMutex(_PyThreadState_GET(), __VA_ARGS__)
187184
188185static inline void
189- _PyCriticalSection2_Begin (PyCriticalSection2 * c , PyObject * a , PyObject * b )
186+ _PyCriticalSection2_Begin (PyThreadState * tstate , PyCriticalSection2 * c , PyObject * a , PyObject * b )
190187{
191- _PyCriticalSection2_BeginMutex (c , & a -> ob_mutex , & b -> ob_mutex );
188+ _PyCriticalSection2_BeginMutex (tstate , c , & a -> ob_mutex , & b -> ob_mutex );
192189}
193- #define PyCriticalSection2_Begin _PyCriticalSection2_Begin
190+ #define PyCriticalSection2_Begin (...) _PyCriticalSection2_Begin(_PyThreadState_GET(), __VA_ARGS__)
194191
195192static inline void
196- _PyCriticalSection2_End (PyCriticalSection2 * c )
193+ _PyCriticalSection2_End (PyThreadState * tstate , PyCriticalSection2 * c )
197194{
198195 // if mutex1 is NULL, we used the fast path in
199196 // _PyCriticalSection_BeginSlow for mutexes that are already held,
@@ -207,9 +204,9 @@ _PyCriticalSection2_End(PyCriticalSection2 *c)
207204 PyMutex_Unlock (c -> _cs_mutex2 );
208205 }
209206 PyMutex_Unlock (c -> _cs_base ._cs_mutex );
210- _PyCriticalSection_Pop (& c -> _cs_base );
207+ _PyCriticalSection_Pop (tstate , & c -> _cs_base );
211208}
212- #define PyCriticalSection2_End _PyCriticalSection2_End
209+ #define PyCriticalSection2_End (...) _PyCriticalSection2_End(_PyThreadState_GET(), __VA_ARGS__)
213210
214211static inline void
215212_PyCriticalSection_AssertHeld (PyMutex * mutex )
@@ -251,6 +248,45 @@ _PyCriticalSection_AssertHeldObj(PyObject *op)
251248
252249#endif
253250}

// Redefine the public Py_BEGIN/END_CRITICAL_SECTION macros for internal use:
// the thread state is fetched once at Begin into `_cs_tstate` and reused by
// the matching End, instead of being fetched again on every call. Each Begin
// opens a brace that the matching End closes, so the pairs must nest.
#undef Py_BEGIN_CRITICAL_SECTION
#define Py_BEGIN_CRITICAL_SECTION(op)                                   \
    {                                                                   \
        PyCriticalSection _py_cs;                                       \
        PyThreadState *_cs_tstate = _PyThreadState_GET();               \
        _PyCriticalSection_Begin(_cs_tstate, &_py_cs, _PyObject_CAST(op))

#undef Py_BEGIN_CRITICAL_SECTION_MUTEX
#define Py_BEGIN_CRITICAL_SECTION_MUTEX(mutex)                          \
    {                                                                   \
        PyCriticalSection _py_cs;                                       \
        PyThreadState *_cs_tstate = _PyThreadState_GET();               \
        _PyCriticalSection_BeginMutex(_cs_tstate, &_py_cs, mutex)

#undef Py_END_CRITICAL_SECTION
#define Py_END_CRITICAL_SECTION()                                       \
        _PyCriticalSection_End(_cs_tstate, &_py_cs);                    \
    }

#undef Py_BEGIN_CRITICAL_SECTION2
#define Py_BEGIN_CRITICAL_SECTION2(a, b)                                \
    {                                                                   \
        PyCriticalSection2 _py_cs2;                                     \
        PyThreadState *_cs_tstate = _PyThreadState_GET();               \
        _PyCriticalSection2_Begin(_cs_tstate, &_py_cs2,                 \
                                  _PyObject_CAST(a), _PyObject_CAST(b))

#undef Py_BEGIN_CRITICAL_SECTION2_MUTEX
#define Py_BEGIN_CRITICAL_SECTION2_MUTEX(m1, m2)                        \
    {                                                                   \
        PyCriticalSection2 _py_cs2;                                     \
        PyThreadState *_cs_tstate = _PyThreadState_GET();               \
        _PyCriticalSection2_BeginMutex(_cs_tstate, &_py_cs2, m1, m2)

#undef Py_END_CRITICAL_SECTION2
#define Py_END_CRITICAL_SECTION2()                                      \
        _PyCriticalSection2_End(_cs_tstate, &_py_cs2);                  \
    }

254290#endif /* Py_GIL_DISABLED */
255291
256292#ifdef __cplusplus
0 commit comments