@@ -545,6 +545,59 @@ static inline Py_ssize_t
545545_Py_atomic_load_ssize_acquire (const Py_ssize_t * obj );
546546
547547
548+ // --- _Py_atomic_memcpy / _Py_atomic_memmove ------------
549+
static inline void *
_Py_atomic_memcpy_ptr_store_relaxed(void *dest, void *src, size_t n)
{
    // Preconditions (checked by callers, kept here for documentation):
    // assert(((uintptr_t)dest & (sizeof(void *) - 1)) == 0);
    // assert(((uintptr_t)src & (sizeof(void *) - 1)) == 0);
    // assert(n % sizeof(void *) == 0);

    // Copy `n` bytes as pointer-sized words using relaxed atomic stores,
    // so a concurrent reader never observes a torn pointer value.
    // Regions must not overlap (memcpy semantics); identical regions are
    // a no-op.
    if (dest != src) {
        void **d = (void **)dest;
        void **s = (void **)src;
        size_t count = n / sizeof(void *);
        for (size_t i = 0; i < count; i++) {
            _Py_atomic_store_ptr_relaxed(&d[i], s[i]);
        }
    }
    return dest;
}
569+
static inline void *
_Py_atomic_memmove_ptr_store_relaxed(void *dest, void *src, size_t n)
{
    // Preconditions (checked by callers, kept here for documentation):
    // assert(((uintptr_t)dest & (sizeof(void *) - 1)) == 0);
    // assert(((uintptr_t)src & (sizeof(void *) - 1)) == 0);
    // assert(n % sizeof(void *) == 0);

    // memmove semantics for pointer-sized words: each word is written
    // with a relaxed atomic store so concurrent readers never observe a
    // torn pointer.  Overlapping regions are handled by choosing the
    // copy direction, like memmove.
    size_t count = n / sizeof(void *);
    void **d = (void **)dest;
    void **s = (void **)src;

    if (dest < src || dest >= (void *)((char *)src + n)) {
        // Non-overlapping, or dest below src: a forward copy is safe.
        for (size_t i = 0; i < count; i++) {
            _Py_atomic_store_ptr_relaxed(&d[i], s[i]);
        }
    }
    else if (dest > src) {
        // dest overlaps the tail of src: copy backwards.  A counted
        // index loop is used instead of decrementing a pointer past the
        // start of the array — forming `(void **)dest - 1` would be
        // undefined behavior (C11 6.5.6: pointer arithmetic must stay
        // within the array or one past its end).
        for (size_t i = count; i > 0; i--) {
            _Py_atomic_store_ptr_relaxed(&d[i - 1], s[i - 1]);
        }
    }
    // dest == src: nothing to do.
    return dest;
}
599+
600+
548601
549602
550603// --- _Py_atomic_fence ------------------------------------------------------
@@ -612,56 +665,3 @@ static inline void _Py_atomic_fence_release(void);
612665#else
613666# error "long must be 4 or 8 bytes in size"
614667#endif // SIZEOF_LONG
615-
616-
617- // --- _Py_atomic_memcpy / _Py_atomic_memmove ------------
618-
static inline void *
_Py_atomic_memcpy_ptr_store_relaxed(void *dest, void *src, size_t n)
{
    // Preconditions (checked by callers, kept here for documentation):
    // assert(((uintptr_t)dest & (sizeof(void *) - 1)) == 0);
    // assert(((uintptr_t)src & (sizeof(void *) - 1)) == 0);
    // assert(n % sizeof(void *) == 0);

    // Copy `n` bytes as pointer-sized words using relaxed atomic stores,
    // so a concurrent reader never observes a torn pointer value.
    // Regions must not overlap (memcpy semantics); identical regions are
    // a no-op.
    if (dest != src) {
        void **d = (void **)dest;
        void **s = (void **)src;
        size_t count = n / sizeof(void *);
        for (size_t i = 0; i < count; i++) {
            _Py_atomic_store_ptr_relaxed(&d[i], s[i]);
        }
    }
    return dest;
}
638-
static inline void *
_Py_atomic_memmove_ptr_store_relaxed(void *dest, void *src, size_t n)
{
    // Preconditions (checked by callers, kept here for documentation):
    // assert(((uintptr_t)dest & (sizeof(void *) - 1)) == 0);
    // assert(((uintptr_t)src & (sizeof(void *) - 1)) == 0);
    // assert(n % sizeof(void *) == 0);

    // memmove semantics for pointer-sized words: each word is written
    // with a relaxed atomic store so concurrent readers never observe a
    // torn pointer.  Overlapping regions are handled by choosing the
    // copy direction, like memmove.
    size_t count = n / sizeof(void *);
    void **d = (void **)dest;
    void **s = (void **)src;

    if (dest < src || dest >= (void *)((char *)src + n)) {
        // Non-overlapping, or dest below src: a forward copy is safe.
        for (size_t i = 0; i < count; i++) {
            _Py_atomic_store_ptr_relaxed(&d[i], s[i]);
        }
    }
    else if (dest > src) {
        // dest overlaps the tail of src: copy backwards.  A counted
        // index loop is used instead of decrementing a pointer past the
        // start of the array — forming `(void **)dest - 1` would be
        // undefined behavior (C11 6.5.6: pointer arithmetic must stay
        // within the array or one past its end).
        for (size_t i = count; i > 0; i--) {
            _Py_atomic_store_ptr_relaxed(&d[i - 1], s[i - 1]);
        }
    }
    // dest == src: nothing to do.
    return dest;
}
0 commit comments