@@ -14187,8 +14187,11 @@ immortalize_interned(PyObject *s)
14187 14187         _Py_DecRefTotal(_PyThreadState_GET());
14188 14188     }
14189 14189 #endif
14190       -    FT_ATOMIC_STORE_UINT8_RELAXED(_PyUnicode_STATE(s).interned, SSTATE_INTERNED_IMMORTAL);
14191 14190     _Py_SetImmortal(s);
      14191 +    // The switch to SSTATE_INTERNED_IMMORTAL must be the last thing done here
      14192 +    // to synchronize with the check in intern_common() that avoids locking if
      14193 +    // the string is already immortal.
      14194 +    FT_ATOMIC_STORE_UINT8(_PyUnicode_STATE(s).interned, SSTATE_INTERNED_IMMORTAL);
14192 14195 }
14193 14196 
14194 14197 static /* non-null */ PyObject *
@@ -14270,6 +14273,23 @@ intern_common(PyInterpreterState *interp, PyObject *s /* stolen */,
14270 14273     assert(interned != NULL);
14271 14274 #ifdef Py_GIL_DISABLED
14272 14275 #  define INTERN_MUTEX &_Py_INTERP_CACHED_OBJECT(interp, interned_mutex)
      14276 +    // Lock-free fast path: check if there's already an interned copy that
      14277 +    // is in its final immortal state.
      14278 +    PyObject *r;
      14279 +    int res = PyDict_GetItemRef(interned, s, &r);
      14280 +    if (res < 0) {
      14281 +        PyErr_Clear();
      14282 +        return s;
      14283 +    }
      14284 +    if (res > 0) {
      14285 +        unsigned int state = _Py_atomic_load_uint8(&_PyUnicode_STATE(r).interned);
      14286 +        if (state == SSTATE_INTERNED_IMMORTAL) {
      14287 +            Py_DECREF(s);
      14288 +            return r;
      14289 +        }
      14290 +        // Not yet fully interned; fall through to the locking path.
      14291 +        Py_DECREF(r);
      14292 +    }
14273 14293 #endif
14274 14294     FT_MUTEX_LOCK(INTERN_MUTEX);
14275 14295     PyObject *t;
@@ -14307,7 +14327,7 @@ intern_common(PyInterpreterState *interp, PyObject *s /* stolen */,
14307 14327         Py_DECREF(s);
14308 14328         Py_DECREF(s);
14309 14329     }
14310       -    FT_ATOMIC_STORE_UINT8_RELAXED(_PyUnicode_STATE(s).interned, SSTATE_INTERNED_MORTAL);
      14330 +    FT_ATOMIC_STORE_UINT8(_PyUnicode_STATE(s).interned, SSTATE_INTERNED_MORTAL);
14311 14331 
14312 14332     /* INTERNED_MORTAL -> INTERNED_IMMORTAL (if needed) */
0 commit comments