#ifndef _LIBCPP___ATOMIC_CXX_ATOMIC_IMPL_H
#define _LIBCPP___ATOMIC_CXX_ATOMIC_IMPL_H

- #include <__atomic/is_always_lock_free.h>
#include <__atomic/memory_order.h>
#include <__config>
#include <__memory/addressof.h>
- #include <__type_traits/conditional.h>
#include <__type_traits/is_assignable.h>
#include <__type_traits/is_trivially_copyable.h>
#include <__type_traits/remove_const.h>
#include <cstddef>
- #include <cstring>

#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
#  pragma GCC system_header
#endif

_LIBCPP_BEGIN_NAMESPACE_STD

- #if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP) || defined(_LIBCPP_ATOMIC_ONLY_USE_BUILTINS)
+ #if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP)

// [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
// the default operator= in an object is not volatile, a byte-by-byte copy
@@ -44,10 +41,6 @@ _LIBCPP_HIDE_FROM_ABI void __cxx_atomic_assign_volatile(_Tp volatile& __a_value,
  *__to++ = *__from++;
}

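The context kept by this hunk is the tail of __cxx_atomic_assign_volatile, and the comment above is its rationale: a trivially copyable type's implicitly declared operator= is not volatile-qualified, so assigning through a volatile reference is ill-formed and the bytes must be copied by hand. A minimal standalone illustration of that point, using a hypothetical type name that is not part of libc++:

struct Widget {  // hypothetical trivially copyable type
  int a;
  int b;
};

inline void assign_through_volatile(Widget volatile& dst, Widget const& src) {
  // dst = src;                       // ill-formed: Widget's implicit operator= is not volatile-qualified
  volatile char* to         = reinterpret_cast<volatile char*>(&dst);
  volatile char* end        = to + sizeof(Widget);
  const volatile char* from = reinterpret_cast<const volatile char*>(&src);
  while (to != end)                   // byte-by-byte copy is valid because Widget is trivially copyable
    *to++ = *from++;
}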
- #endif
-
- #if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP)
-
template <typename _Tp>
struct __cxx_atomic_base_impl {
  _LIBCPP_HIDE_FROM_ABI
@@ -529,289 +522,7 @@ __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern, memory_o

#endif // _LIBCPP_HAS_GCC_ATOMIC_IMP, _LIBCPP_HAS_C_ATOMIC_IMP

- #ifdef _LIBCPP_ATOMIC_ONLY_USE_BUILTINS
-
- template <typename _Tp>
- struct __cxx_atomic_lock_impl {
-   _LIBCPP_HIDE_FROM_ABI __cxx_atomic_lock_impl() _NOEXCEPT : __a_value(), __a_lock(0) {}
-   _LIBCPP_HIDE_FROM_ABI _LIBCPP_CONSTEXPR explicit __cxx_atomic_lock_impl(_Tp value) _NOEXCEPT
-       : __a_value(value),
-         __a_lock(0) {}
-
-   _Tp __a_value;
-   mutable __cxx_atomic_base_impl<_LIBCPP_ATOMIC_FLAG_TYPE> __a_lock;
-
-   _LIBCPP_HIDE_FROM_ABI void __lock() const volatile {
-     while (1 == __cxx_atomic_exchange(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(true), memory_order_acquire))
-       /*spin*/;
-   }
-   _LIBCPP_HIDE_FROM_ABI void __lock() const {
-     while (1 == __cxx_atomic_exchange(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(true), memory_order_acquire))
-       /*spin*/;
-   }
-   _LIBCPP_HIDE_FROM_ABI void __unlock() const volatile {
-     __cxx_atomic_store(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(false), memory_order_release);
-   }
-   _LIBCPP_HIDE_FROM_ABI void __unlock() const {
-     __cxx_atomic_store(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(false), memory_order_release);
-   }
-   _LIBCPP_HIDE_FROM_ABI _Tp __read() const volatile {
-     __lock();
-     _Tp __old;
-     __cxx_atomic_assign_volatile(__old, __a_value);
-     __unlock();
-     return __old;
-   }
-   _LIBCPP_HIDE_FROM_ABI _Tp __read() const {
-     __lock();
-     _Tp __old = __a_value;
-     __unlock();
-     return __old;
-   }
-   _LIBCPP_HIDE_FROM_ABI void __read_inplace(_Tp* __dst) const volatile {
-     __lock();
-     __cxx_atomic_assign_volatile(*__dst, __a_value);
-     __unlock();
-   }
-   _LIBCPP_HIDE_FROM_ABI void __read_inplace(_Tp* __dst) const {
-     __lock();
-     *__dst = __a_value;
-     __unlock();
-   }
- };
-
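The struct removed above is the lock-based fallback: the value is held as a plain member and every operation first takes a one-word spinlock built on __cxx_atomic_base_impl. A minimal standalone sketch of the same pattern in portable C++11, with hypothetical names that are not the libc++ types:

#include <atomic>

template <class T>
struct locked_box {                      // hypothetical stand-in for __cxx_atomic_lock_impl
  T value{};
  mutable std::atomic<bool> busy{false}; // the spinlock word

  void acquire() const {
    while (busy.exchange(true, std::memory_order_acquire))
      /* spin */;
  }
  void release() const { busy.store(false, std::memory_order_release); }

  T load() const {
    acquire();
    T old = value;                       // plain read, protected by the spinlock
    release();
    return old;
  }
  void store(T v) {
    acquire();
    value = v;                           // plain write, protected by the spinlock
    release();
  }
};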
- template <typename _Tp>
- _LIBCPP_HIDE_FROM_ABI void __cxx_atomic_init(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __val) {
-   __cxx_atomic_assign_volatile(__a->__a_value, __val);
- }
- template <typename _Tp>
- _LIBCPP_HIDE_FROM_ABI void __cxx_atomic_init(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __val) {
-   __a->__a_value = __val;
- }
-
- template <typename _Tp>
- _LIBCPP_HIDE_FROM_ABI void __cxx_atomic_store(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __val, memory_order) {
-   __a->__lock();
-   __cxx_atomic_assign_volatile(__a->__a_value, __val);
-   __a->__unlock();
- }
- template <typename _Tp>
- _LIBCPP_HIDE_FROM_ABI void __cxx_atomic_store(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __val, memory_order) {
-   __a->__lock();
-   __a->__a_value = __val;
-   __a->__unlock();
- }
-
- template <typename _Tp>
- _LIBCPP_HIDE_FROM_ABI _Tp __cxx_atomic_load(const volatile __cxx_atomic_lock_impl<_Tp>* __a, memory_order) {
-   return __a->__read();
- }
- template <typename _Tp>
- _LIBCPP_HIDE_FROM_ABI _Tp __cxx_atomic_load(const __cxx_atomic_lock_impl<_Tp>* __a, memory_order) {
-   return __a->__read();
- }
-
- template <typename _Tp>
- _LIBCPP_HIDE_FROM_ABI void
- __cxx_atomic_load(const volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp* __dst, memory_order) {
-   __a->__read_inplace(__dst);
- }
- template <typename _Tp>
- _LIBCPP_HIDE_FROM_ABI void __cxx_atomic_load(const __cxx_atomic_lock_impl<_Tp>* __a, _Tp* __dst, memory_order) {
-   __a->__read_inplace(__dst);
- }
-
- template <typename _Tp>
- _LIBCPP_HIDE_FROM_ABI _Tp __cxx_atomic_exchange(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __value, memory_order) {
-   __a->__lock();
-   _Tp __old;
-   __cxx_atomic_assign_volatile(__old, __a->__a_value);
-   __cxx_atomic_assign_volatile(__a->__a_value, __value);
-   __a->__unlock();
-   return __old;
- }
- template <typename _Tp>
- _LIBCPP_HIDE_FROM_ABI _Tp __cxx_atomic_exchange(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __value, memory_order) {
-   __a->__lock();
-   _Tp __old = __a->__a_value;
-   __a->__a_value = __value;
-   __a->__unlock();
-   return __old;
- }
-
- template <typename _Tp>
- _LIBCPP_HIDE_FROM_ABI bool __cxx_atomic_compare_exchange_strong(
-     volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order, memory_order) {
-   _Tp __temp;
-   __a->__lock();
-   __cxx_atomic_assign_volatile(__temp, __a->__a_value);
-   bool __ret = (std::memcmp(&__temp, __expected, sizeof(_Tp)) == 0);
-   if (__ret)
-     __cxx_atomic_assign_volatile(__a->__a_value, __value);
-   else
-     __cxx_atomic_assign_volatile(*__expected, __a->__a_value);
-   __a->__unlock();
-   return __ret;
- }
- template <typename _Tp>
- _LIBCPP_HIDE_FROM_ABI bool __cxx_atomic_compare_exchange_strong(
-     __cxx_atomic_lock_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order, memory_order) {
-   __a->__lock();
-   bool __ret = (std::memcmp(&__a->__a_value, __expected, sizeof(_Tp)) == 0);
-   if (__ret)
-     std::memcpy(&__a->__a_value, &__value, sizeof(_Tp));
-   else
-     std::memcpy(__expected, &__a->__a_value, sizeof(_Tp));
-   __a->__unlock();
-   return __ret;
- }
-
- template <typename _Tp>
- _LIBCPP_HIDE_FROM_ABI bool __cxx_atomic_compare_exchange_weak(
-     volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order, memory_order) {
-   _Tp __temp;
-   __a->__lock();
-   __cxx_atomic_assign_volatile(__temp, __a->__a_value);
-   bool __ret = (std::memcmp(&__temp, __expected, sizeof(_Tp)) == 0);
-   if (__ret)
-     __cxx_atomic_assign_volatile(__a->__a_value, __value);
-   else
-     __cxx_atomic_assign_volatile(*__expected, __a->__a_value);
-   __a->__unlock();
-   return __ret;
- }
- template <typename _Tp>
- _LIBCPP_HIDE_FROM_ABI bool __cxx_atomic_compare_exchange_weak(
-     __cxx_atomic_lock_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order, memory_order) {
-   __a->__lock();
-   bool __ret = (std::memcmp(&__a->__a_value, __expected, sizeof(_Tp)) == 0);
-   if (__ret)
-     std::memcpy(&__a->__a_value, &__value, sizeof(_Tp));
-   else
-     std::memcpy(__expected, &__a->__a_value, sizeof(_Tp));
-   __a->__unlock();
-   return __ret;
- }
-
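Both pairs of compare-exchange overloads removed above compare object representations with std::memcmp and copy with std::memcpy rather than using == and =, matching the bytewise comparison that std::atomic's compare_exchange operations are specified to perform. A minimal sketch of that lock-guarded shape, reusing the hypothetical locked_box type assumed earlier:

#include <cstring>

template <class T>
bool compare_exchange_locked(locked_box<T>& box, T* expected, T desired) {
  box.acquire();
  // bytewise comparison: padding bits participate, unlike operator==
  bool matched = std::memcmp(&box.value, expected, sizeof(T)) == 0;
  if (matched)
    std::memcpy(&box.value, &desired, sizeof(T));  // success: install the new value
  else
    std::memcpy(expected, &box.value, sizeof(T));  // failure: report the current value
  box.release();
  return matched;
}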
- template <typename _Tp, typename _Td>
- _LIBCPP_HIDE_FROM_ABI _Tp __cxx_atomic_fetch_add(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Td __delta, memory_order) {
-   __a->__lock();
-   _Tp __old;
-   __cxx_atomic_assign_volatile(__old, __a->__a_value);
-   __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old + __delta));
-   __a->__unlock();
-   return __old;
- }
- template <typename _Tp, typename _Td>
- _LIBCPP_HIDE_FROM_ABI _Tp __cxx_atomic_fetch_add(__cxx_atomic_lock_impl<_Tp>* __a, _Td __delta, memory_order) {
-   __a->__lock();
-   _Tp __old = __a->__a_value;
-   __a->__a_value += __delta;
-   __a->__unlock();
-   return __old;
- }
-
- template <typename _Tp, typename _Td>
- _LIBCPP_HIDE_FROM_ABI _Tp*
- __cxx_atomic_fetch_add(volatile __cxx_atomic_lock_impl<_Tp*>* __a, ptrdiff_t __delta, memory_order) {
-   __a->__lock();
-   _Tp* __old;
-   __cxx_atomic_assign_volatile(__old, __a->__a_value);
-   __cxx_atomic_assign_volatile(__a->__a_value, __old + __delta);
-   __a->__unlock();
-   return __old;
- }
- template <typename _Tp, typename _Td>
- _LIBCPP_HIDE_FROM_ABI _Tp* __cxx_atomic_fetch_add(__cxx_atomic_lock_impl<_Tp*>* __a, ptrdiff_t __delta, memory_order) {
-   __a->__lock();
-   _Tp* __old = __a->__a_value;
-   __a->__a_value += __delta;
-   __a->__unlock();
-   return __old;
- }
-
- template <typename _Tp, typename _Td>
- _LIBCPP_HIDE_FROM_ABI _Tp __cxx_atomic_fetch_sub(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Td __delta, memory_order) {
-   __a->__lock();
-   _Tp __old;
-   __cxx_atomic_assign_volatile(__old, __a->__a_value);
-   __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old - __delta));
-   __a->__unlock();
-   return __old;
- }
- template <typename _Tp, typename _Td>
- _LIBCPP_HIDE_FROM_ABI _Tp __cxx_atomic_fetch_sub(__cxx_atomic_lock_impl<_Tp>* __a, _Td __delta, memory_order) {
-   __a->__lock();
-   _Tp __old = __a->__a_value;
-   __a->__a_value -= __delta;
-   __a->__unlock();
-   return __old;
- }
-
- template <typename _Tp>
- _LIBCPP_HIDE_FROM_ABI _Tp
- __cxx_atomic_fetch_and(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __pattern, memory_order) {
-   __a->__lock();
-   _Tp __old;
-   __cxx_atomic_assign_volatile(__old, __a->__a_value);
-   __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old & __pattern));
-   __a->__unlock();
-   return __old;
- }
- template <typename _Tp>
- _LIBCPP_HIDE_FROM_ABI _Tp __cxx_atomic_fetch_and(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __pattern, memory_order) {
-   __a->__lock();
-   _Tp __old = __a->__a_value;
-   __a->__a_value &= __pattern;
-   __a->__unlock();
-   return __old;
- }
-
- template <typename _Tp>
- _LIBCPP_HIDE_FROM_ABI _Tp
- __cxx_atomic_fetch_or(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __pattern, memory_order) {
-   __a->__lock();
-   _Tp __old;
-   __cxx_atomic_assign_volatile(__old, __a->__a_value);
-   __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old | __pattern));
-   __a->__unlock();
-   return __old;
- }
- template <typename _Tp>
- _LIBCPP_HIDE_FROM_ABI _Tp __cxx_atomic_fetch_or(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __pattern, memory_order) {
-   __a->__lock();
-   _Tp __old = __a->__a_value;
-   __a->__a_value |= __pattern;
-   __a->__unlock();
-   return __old;
- }
-
- template <typename _Tp>
- _LIBCPP_HIDE_FROM_ABI _Tp
- __cxx_atomic_fetch_xor(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __pattern, memory_order) {
-   __a->__lock();
-   _Tp __old;
-   __cxx_atomic_assign_volatile(__old, __a->__a_value);
-   __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old ^ __pattern));
-   __a->__unlock();
-   return __old;
- }
- template <typename _Tp>
- _LIBCPP_HIDE_FROM_ABI _Tp __cxx_atomic_fetch_xor(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __pattern, memory_order) {
-   __a->__lock();
-   _Tp __old = __a->__a_value;
-   __a->__a_value ^= __pattern;
-   __a->__unlock();
-   return __old;
- }
-
- template <typename _Tp,
-           typename _Base = typename conditional<__libcpp_is_always_lock_free<_Tp>::__value,
-                                                 __cxx_atomic_base_impl<_Tp>,
-                                                 __cxx_atomic_lock_impl<_Tp> >::type>
- #else
template <typename _Tp, typename _Base = __cxx_atomic_base_impl<_Tp> >
- #endif // _LIBCPP_ATOMIC_ONLY_USE_BUILTINS
struct __cxx_atomic_impl : public _Base {
  static_assert(is_trivially_copyable<_Tp>::value, "std::atomic<T> requires that 'T' be a trivially copyable type");

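For reference, the #ifdef branch removed in the last hunk chose between the lock-free and lock-based backends at compile time with conditional<>, keyed on __libcpp_is_always_lock_free. A minimal sketch of that dispatch pattern in standard C++17, with hypothetical backend names that are not the libc++ types:

#include <atomic>
#include <type_traits>

template <class T> struct lockfree_backend { /* would wrap the compiler's atomic builtins */ };
template <class T> struct locked_backend   { /* would wrap a spinlock plus a plain value */ };

template <class T>
using atomic_backend_t =
    typename std::conditional<std::atomic<T>::is_always_lock_free,
                              lockfree_backend<T>,
                              locked_backend<T> >::type;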