Commit a16cf5b

control test with original main pyatomic.h
1 parent ce439d7 commit a16cf5b

5 files changed (+3, -173 lines)


Include/cpython/pyatomic.h

Lines changed: 0 additions & 7 deletions
@@ -545,13 +545,6 @@ static inline Py_ssize_t
 _Py_atomic_load_ssize_acquire(const Py_ssize_t *obj);
 
 
-// --- _Py_atomic_memcpy / _Py_atomic_memmove ------------
-
-static inline void *
-_Py_atomic_memcpy_ptr_store_relaxed(void *dest, void *src, size_t n);
-
-static inline void *
-_Py_atomic_memmove_ptr_store_relaxed(void *dest, void *src, size_t n);
 
 
 // --- _Py_atomic_fence ------------------------------------------------------

Include/cpython/pyatomic_gcc.h

Lines changed: 1 addition & 55 deletions
@@ -600,61 +600,7 @@ static inline Py_ssize_t
 _Py_atomic_load_ssize_acquire(const Py_ssize_t *obj)
 { return __atomic_load_n(obj, __ATOMIC_ACQUIRE); }
 
-
-// --- _Py_atomic_memcpy / _Py_atomic_memmove ------------
-
-static inline void *
-_Py_atomic_memcpy_ptr_store_relaxed(void *dest, void *src, size_t n)
-{
-    assert(_Py_IS_ALIGNED(dest, sizeof(void *)));
-    assert(_Py_IS_ALIGNED(src, sizeof(void *)));
-    assert(n % sizeof(void *) == 0);
-
-    if (dest != src) {
-        void **dest_ = (void **)dest;
-        void **src_ = (void **)src;
-        void **end = dest_ + n / sizeof(void *);
-
-        for (; dest_ != end; dest_++, src_++) {
-            _Py_atomic_store_ptr_relaxed(dest_, *src_);
-        }
-    }
-
-    return dest;
-}
-
-static inline void *
-_Py_atomic_memmove_ptr_store_relaxed(void *dest, void *src, size_t n)
-{
-    assert(_Py_IS_ALIGNED(dest, sizeof(void *)));
-    assert(_Py_IS_ALIGNED(src, sizeof(void *)));
-    assert(n % sizeof(void *) == 0);
-
-    if (dest < src || dest >= (void *)((char *)src + n)) {
-        void **dest_ = (void **)dest;
-        void **src_ = (void **)src;
-        void **end = dest_ + n / sizeof(void *);
-
-        for (; dest_ != end; dest_++, src_++) {
-            _Py_atomic_store_ptr_relaxed(dest_, *src_);
-        }
-    }
-    else if (dest > src) {
-        n = n / sizeof(void *) - 1;
-        void **dest_ = (void **)dest + n;
-        void **src_ = (void **)src + n;
-        void **end = (void **)dest - 1;
-
-        for (; dest_ != end; dest_--, src_--) {
-            _Py_atomic_store_ptr_relaxed(dest_, *src_);
-        }
-    }
-
-    return dest;
-}
-
-
-// --- _Py_atomic_fence ------------------------------------------------------
+z// --- _Py_atomic_fence ------------------------------------------------------
 
 static inline void
 _Py_atomic_fence_seq_cst(void)
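
The helpers deleted from each backend copy the buffer one pointer-sized word at a time, writing every word with a relaxed atomic store instead of a byte-wise memcpy()/memmove(), presumably so that concurrent readers in a free-threaded build never observe a torn pointer value. Below is a minimal standalone sketch of that copy loop using C11 <stdatomic.h> in place of CPython's internal _Py_atomic_store_ptr_relaxed(); the name relaxed_ptr_memcpy and the main() driver are invented for illustration and are not part of this diff.

#include <assert.h>
#include <stdatomic.h>
#include <stddef.h>
#include <stdio.h>

static void *
relaxed_ptr_memcpy(void *dest, void *src, size_t n)
{
    assert(n % sizeof(void *) == 0);

    if (dest != src) {
        void **dest_ = (void **)dest;
        void **src_ = (void **)src;
        void **end = dest_ + n / sizeof(void *);

        for (; dest_ != end; dest_++, src_++) {
            /* Publish each pointer-sized word with a relaxed atomic store,
               standing in for _Py_atomic_store_ptr_relaxed(). */
            atomic_store_explicit((_Atomic(void *) *)dest_, *src_,
                                  memory_order_relaxed);
        }
    }
    return dest;
}

int
main(void)
{
    void *src[4] = {(void *)0x1, (void *)0x2, (void *)0x3, (void *)0x4};
    void *dst[4] = {NULL, NULL, NULL, NULL};

    relaxed_ptr_memcpy(dst, src, sizeof(src));
    printf("%p %p\n", dst[0], dst[3]);   /* expected: 0x1 0x4 */
    return 0;
}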

Include/cpython/pyatomic_msc.h

Lines changed: 0 additions & 54 deletions
@@ -1154,60 +1154,6 @@ _Py_atomic_load_ssize_acquire(const Py_ssize_t *obj)
 #endif
 }
 
-
-// --- _Py_atomic_memcpy / _Py_atomic_memmove ------------
-
-static inline void *
-_Py_atomic_memcpy_ptr_store_relaxed(void *dest, void *src, size_t n)
-{
-    assert(_Py_IS_ALIGNED(dest, sizeof(void *)));
-    assert(_Py_IS_ALIGNED(src, sizeof(void *)));
-    assert(n % sizeof(void *) == 0);
-
-    if (dest != src) {
-        void **dest_ = (void **)dest;
-        void **src_ = (void **)src;
-        void **end = dest_ + n / sizeof(void *);
-
-        for (; dest_ != end; dest_++, src_++) {
-            _Py_atomic_store_ptr_relaxed(dest_, *src_);
-        }
-    }
-
-    return dest;
-}
-
-static inline void *
-_Py_atomic_memmove_ptr_store_relaxed(void *dest, void *src, size_t n)
-{
-    assert(_Py_IS_ALIGNED(dest, sizeof(void *)));
-    assert(_Py_IS_ALIGNED(src, sizeof(void *)));
-    assert(n % sizeof(void *) == 0);
-
-    if (dest < src || dest >= (void *)((char *)src + n)) {
-        void **dest_ = (void **)dest;
-        void **src_ = (void **)src;
-        void **end = dest_ + n / sizeof(void *);
-
-        for (; dest_ != end; dest_++, src_++) {
-            _Py_atomic_store_ptr_relaxed(dest_, *src_);
-        }
-    }
-    else if (dest > src) {
-        n = n / sizeof(void *) - 1;
-        void **dest_ = (void **)dest + n;
-        void **src_ = (void **)src + n;
-        void **end = (void **)dest - 1;
-
-        for (; dest_ != end; dest_--, src_--) {
-            _Py_atomic_store_ptr_relaxed(dest_, *src_);
-        }
-    }
-
-    return dest;
-}
-
-
 // --- _Py_atomic_fence ------------------------------------------------------
 
 static inline void

Include/cpython/pyatomic_std.h

Lines changed: 0 additions & 53 deletions
@@ -1088,59 +1088,6 @@ _Py_atomic_load_ssize_acquire(const Py_ssize_t *obj)
 }
 
 
-// --- _Py_atomic_memcpy / _Py_atomic_memmove ------------
-
-static inline void *
-_Py_atomic_memcpy_ptr_store_relaxed(void *dest, void *src, size_t n)
-{
-    assert(_Py_IS_ALIGNED(dest, sizeof(void *)));
-    assert(_Py_IS_ALIGNED(src, sizeof(void *)));
-    assert(n % sizeof(void *) == 0);
-
-    if (dest != src) {
-        void **dest_ = (void **)dest;
-        void **src_ = (void **)src;
-        void **end = dest_ + n / sizeof(void *);
-
-        for (; dest_ != end; dest_++, src_++) {
-            _Py_atomic_store_ptr_relaxed(dest_, *src_);
-        }
-    }
-
-    return dest;
-}
-
-static inline void *
-_Py_atomic_memmove_ptr_store_relaxed(void *dest, void *src, size_t n)
-{
-    assert(_Py_IS_ALIGNED(dest, sizeof(void *)));
-    assert(_Py_IS_ALIGNED(src, sizeof(void *)));
-    assert(n % sizeof(void *) == 0);
-
-    if (dest < src || dest >= (void *)((char *)src + n)) {
-        void **dest_ = (void **)dest;
-        void **src_ = (void **)src;
-        void **end = dest_ + n / sizeof(void *);
-
-        for (; dest_ != end; dest_++, src_++) {
-            _Py_atomic_store_ptr_relaxed(dest_, *src_);
-        }
-    }
-    else if (dest > src) {
-        n = n / sizeof(void *) - 1;
-        void **dest_ = (void **)dest + n;
-        void **src_ = (void **)src + n;
-        void **end = (void **)dest - 1;
-
-        for (; dest_ != end; dest_--, src_--) {
-            _Py_atomic_store_ptr_relaxed(dest_, *src_);
-        }
-    }
-
-    return dest;
-}
-
-
 // --- _Py_atomic_fence ------------------------------------------------------
 
 static inline void

Include/internal/pycore_pyatomic_ft_wrappers.h

Lines changed: 2 additions & 4 deletions
@@ -114,10 +114,8 @@ extern "C" {
 #define FT_MUTEX_LOCK(lock) PyMutex_Lock(lock)
 #define FT_MUTEX_UNLOCK(lock) PyMutex_Unlock(lock)
 
-#define FT_ATOMIC_MEMCPY_PTR_STORE_RELAXED(dest, src, n) \
-    _Py_atomic_memcpy_ptr_store_relaxed(dest, src, (size_t)(n))
-#define FT_ATOMIC_MEMMOVE_PTR_STORE_RELAXED(dest, src, n) \
-    _Py_atomic_memmove_ptr_store_relaxed(dest, src, (size_t)(n))
+#define FT_ATOMIC_MEMCPY_PTR_STORE_RELAXED(dest, src, n) memcpy(dest, src, n)
+#define FT_ATOMIC_MEMMOVE_PTR_STORE_RELAXED(dest, src, n) memmove(dest, src, n)
 
 
 #else
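
With this control commit the free-threaded wrapper macros forward straight to the libc routines, so call sites compile to a single memcpy()/memmove() with no per-word relaxed stores. A hedged sketch of how a hypothetical call site expands after this change (copy_object_slots and its parameters are invented names, not CPython source; the macro definition is copied verbatim from the added line above):

#include <stddef.h>
#include <string.h>

/* Definition as added by this commit. */
#define FT_ATOMIC_MEMCPY_PTR_STORE_RELAXED(dest, src, n) memcpy(dest, src, n)

/* Hypothetical caller copying a block of object pointers: the macro now
   expands to memcpy(dst_slots, src_slots, count * sizeof(void *)). */
static void
copy_object_slots(void **dst_slots, void **src_slots, size_t count)
{
    FT_ATOMIC_MEMCPY_PTR_STORE_RELAXED(dst_slots, src_slots,
                                       count * sizeof(void *));
}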
