// -*- C++ -*-
//===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
// Kokkos v. 4.0
// Copyright (2022) National Technology & Engineering
// Solutions of Sandia, LLC (NTESS).
//
// Under the terms of Contract DE-NA0003525 with NTESS,
// the U.S. Government retains certain rights in this software.
//
//===----------------------------------------------------------------------===//

#ifndef _LIBCPP___ATOMIC_ATOMIC_REF_H
#define _LIBCPP___ATOMIC_ATOMIC_REF_H

#include <__assert>
#include <__atomic/check_memory_order.h>
#include <__atomic/cxx_atomic_impl.h>
#include <__atomic/is_always_lock_free.h>
#include <__config>
#include <__memory/addressof.h>
#include <__type_traits/is_floating_point.h>
#include <__type_traits/is_function.h>
#include <__type_traits/is_integral.h>
#include <__type_traits/is_nothrow_constructible.h>
#include <__type_traits/is_same.h>
#include <__type_traits/is_trivially_copyable.h>
#include <cinttypes>
#include <concepts>
#include <cstddef>
#include <limits>

#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
#  pragma GCC system_header
#endif

_LIBCPP_PUSH_MACROS
#include <__undef_macros>

_LIBCPP_BEGIN_NAMESPACE_STD

#if _LIBCPP_STD_VER >= 20

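// Primary template: implements the operations shared by every atomic_ref
// specialization (load, store, exchange, compare-exchange, wait/notify). The
// two trailing bool parameters dispatch to the integral and floating-point
// specializations defined below.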
template <class _Tp, bool = is_integral_v<_Tp> && !is_same_v<_Tp, bool>, bool = is_floating_point_v<_Tp>>
struct __atomic_ref_base {
  mutable __cxx_atomic_impl<_Tp&> __a_;

  using value_type = _Tp;

  static constexpr size_t required_alignment = alignof(_Tp);

  static constexpr bool is_always_lock_free = __libcpp_is_always_lock_free<_Tp>::__value;

  _LIBCPP_HIDE_FROM_ABI bool is_lock_free() const noexcept { return __cxx_atomic_is_lock_free(sizeof(_Tp)); }

  _LIBCPP_HIDE_FROM_ABI void store(_Tp __desired, memory_order __order = memory_order::seq_cst) const noexcept
      _LIBCPP_CHECK_STORE_MEMORY_ORDER(__order) {
    _LIBCPP_ASSERT_UNCATEGORIZED(
        __order == memory_order::relaxed || __order == memory_order::release || __order == memory_order::seq_cst,
        "memory order argument to atomic store operation is invalid");
    __cxx_atomic_store(&__a_, __desired, __order);
  }

  _LIBCPP_HIDE_FROM_ABI _Tp operator=(_Tp __desired) const noexcept {
    store(__desired);
    return __desired;
  }

  _LIBCPP_HIDE_FROM_ABI _Tp load(memory_order __order = memory_order::seq_cst) const noexcept
      _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__order) {
    _LIBCPP_ASSERT_UNCATEGORIZED(
        __order == memory_order::relaxed || __order == memory_order::consume || __order == memory_order::acquire ||
            __order == memory_order::seq_cst,
        "memory order argument to atomic load operation is invalid");
    return __cxx_atomic_load(&__a_, __order);
  }

  _LIBCPP_HIDE_FROM_ABI operator _Tp() const noexcept { return load(); }

  _LIBCPP_HIDE_FROM_ABI _Tp exchange(_Tp __desired, memory_order __order = memory_order::seq_cst) const noexcept {
    return __cxx_atomic_exchange(&__a_, __desired, __order);
  }
  _LIBCPP_HIDE_FROM_ABI bool
  compare_exchange_weak(_Tp& __expected, _Tp __desired, memory_order __success, memory_order __failure) const noexcept
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__success, __failure) {
    _LIBCPP_ASSERT_UNCATEGORIZED(
        __failure == memory_order::relaxed || __failure == memory_order::consume ||
            __failure == memory_order::acquire || __failure == memory_order::seq_cst,
        "failure memory order argument to weak atomic compare-and-exchange operation is invalid");
    return __cxx_atomic_compare_exchange_weak(&__a_, &__expected, __desired, __success, __failure);
  }
  _LIBCPP_HIDE_FROM_ABI bool
  compare_exchange_strong(_Tp& __expected, _Tp __desired, memory_order __success, memory_order __failure) const noexcept
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__success, __failure) {
    _LIBCPP_ASSERT_UNCATEGORIZED(
        __failure == memory_order::relaxed || __failure == memory_order::consume ||
            __failure == memory_order::acquire || __failure == memory_order::seq_cst,
        "failure memory order argument to strong atomic compare-and-exchange operation is invalid");
    return __cxx_atomic_compare_exchange_strong(&__a_, &__expected, __desired, __success, __failure);
  }

  _LIBCPP_HIDE_FROM_ABI bool
  compare_exchange_weak(_Tp& __expected, _Tp __desired, memory_order __order = memory_order::seq_cst) const noexcept {
    return __cxx_atomic_compare_exchange_weak(&__a_, &__expected, __desired, __order, __order);
  }
  _LIBCPP_HIDE_FROM_ABI bool
  compare_exchange_strong(_Tp& __expected, _Tp __desired, memory_order __order = memory_order::seq_cst) const noexcept {
    return __cxx_atomic_compare_exchange_strong(&__a_, &__expected, __desired, __order, __order);
  }

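  // wait() blocks until the referenced object is observed to hold a value
  // different from __old; notify_one()/notify_all() wake threads blocked in
  // wait() on the same object.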
  _LIBCPP_HIDE_FROM_ABI void wait(_Tp __old, memory_order __order = memory_order::seq_cst) const noexcept
      _LIBCPP_CHECK_WAIT_MEMORY_ORDER(__order) {
    _LIBCPP_ASSERT_UNCATEGORIZED(
        __order == memory_order::relaxed || __order == memory_order::consume || __order == memory_order::acquire ||
            __order == memory_order::seq_cst,
        "memory order argument to atomic wait operation is invalid");
    __cxx_atomic_wait(addressof(__a_), __old, __order);
  }
  _LIBCPP_HIDE_FROM_ABI void notify_one() const noexcept { __cxx_atomic_notify_one(addressof(__a_)); }
  _LIBCPP_HIDE_FROM_ABI void notify_all() const noexcept { __cxx_atomic_notify_all(addressof(__a_)); }

  _LIBCPP_HIDE_FROM_ABI __atomic_ref_base(_Tp& __obj) : __a_(__obj) {}
};

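// Specialization for integral types other than bool: adds the fetch_* read-
// modify-write operations and the corresponding arithmetic and bitwise
// compound-assignment and increment/decrement operators.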
template <class _Tp>
struct __atomic_ref_base<_Tp, /*_IsIntegral=*/true, /*_IsFloatingPoint=*/false>
    : public __atomic_ref_base<_Tp, false, false> {
  using __base = __atomic_ref_base<_Tp, false, false>;

  using difference_type = __base::value_type;

  _LIBCPP_HIDE_FROM_ABI __atomic_ref_base(_Tp& __obj) : __base(__obj) {}

  _LIBCPP_HIDE_FROM_ABI _Tp operator=(_Tp __desired) const noexcept { return __base::operator=(__desired); }

  _LIBCPP_HIDE_FROM_ABI _Tp fetch_add(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
    return __cxx_atomic_fetch_add(&this->__a_, __arg, __order);
  }
  _LIBCPP_HIDE_FROM_ABI _Tp fetch_sub(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
    return __cxx_atomic_fetch_sub(&this->__a_, __arg, __order);
  }
  _LIBCPP_HIDE_FROM_ABI _Tp fetch_and(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
    return __cxx_atomic_fetch_and(&this->__a_, __arg, __order);
  }
  _LIBCPP_HIDE_FROM_ABI _Tp fetch_or(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
    return __cxx_atomic_fetch_or(&this->__a_, __arg, __order);
  }
  _LIBCPP_HIDE_FROM_ABI _Tp fetch_xor(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
    return __cxx_atomic_fetch_xor(&this->__a_, __arg, __order);
  }

  _LIBCPP_HIDE_FROM_ABI _Tp operator++(int) const noexcept { return fetch_add(_Tp(1)); }
  _LIBCPP_HIDE_FROM_ABI _Tp operator--(int) const noexcept { return fetch_sub(_Tp(1)); }
  _LIBCPP_HIDE_FROM_ABI _Tp operator++() const noexcept { return fetch_add(_Tp(1)) + _Tp(1); }
  _LIBCPP_HIDE_FROM_ABI _Tp operator--() const noexcept { return fetch_sub(_Tp(1)) - _Tp(1); }
  _LIBCPP_HIDE_FROM_ABI _Tp operator+=(_Tp __arg) const noexcept { return fetch_add(__arg) + __arg; }
  _LIBCPP_HIDE_FROM_ABI _Tp operator-=(_Tp __arg) const noexcept { return fetch_sub(__arg) - __arg; }
  _LIBCPP_HIDE_FROM_ABI _Tp operator&=(_Tp __arg) const noexcept { return fetch_and(__arg) & __arg; }
  _LIBCPP_HIDE_FROM_ABI _Tp operator|=(_Tp __arg) const noexcept { return fetch_or(__arg) | __arg; }
  _LIBCPP_HIDE_FROM_ABI _Tp operator^=(_Tp __arg) const noexcept { return fetch_xor(__arg) ^ __arg; }
};

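// Specialization for floating-point types: adds fetch_add/fetch_sub and the
// += and -= compound-assignment operators.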
template <class _Tp>
struct __atomic_ref_base<_Tp, /*_IsIntegral=*/false, /*_IsFloatingPoint=*/true>
    : public __atomic_ref_base<_Tp, false, false> {
  using __base = __atomic_ref_base<_Tp, false, false>;

  using difference_type = __base::value_type;

  _LIBCPP_HIDE_FROM_ABI __atomic_ref_base(_Tp& __obj) : __base(__obj) {}

  _LIBCPP_HIDE_FROM_ABI _Tp operator=(_Tp __desired) const noexcept { return __base::operator=(__desired); }

  _LIBCPP_HIDE_FROM_ABI _Tp fetch_add(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
    return __cxx_atomic_fetch_add(&this->__a_, __arg, __order);
  }
  _LIBCPP_HIDE_FROM_ABI _Tp fetch_sub(_Tp __arg, memory_order __order = memory_order_seq_cst) const noexcept {
    return __cxx_atomic_fetch_sub(&this->__a_, __arg, __order);
  }

  _LIBCPP_HIDE_FROM_ABI _Tp operator+=(_Tp __arg) const noexcept { return fetch_add(__arg) + __arg; }
  _LIBCPP_HIDE_FROM_ABI _Tp operator-=(_Tp __arg) const noexcept { return fetch_sub(__arg) - __arg; }
};

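// std::atomic_ref<T> applies atomic operations to the object it references.
// T must be trivially copyable, and the referenced object must be aligned to
// at least required_alignment (checked by the assertion in the constructor).
//
// Illustrative use (not part of this header):
//
//   int counter = 0;
//   std::atomic_ref<int> ref(counter);
//   ref.fetch_add(1, std::memory_order_relaxed); // atomic increment of counter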
template <class _Tp>
struct atomic_ref : public __atomic_ref_base<_Tp> {
  static_assert(is_trivially_copyable<_Tp>::value, "std::atomic_ref<T> requires that 'T' be a trivially copyable type");

  using __base = __atomic_ref_base<_Tp>;

  _LIBCPP_HIDE_FROM_ABI explicit atomic_ref(_Tp& __obj) : __base(__obj) {
    _LIBCPP_ASSERT_UNCATEGORIZED((uintptr_t)addressof(__obj) % __base::required_alignment == 0,
                                 "atomic_ref ctor: referenced object must be aligned to required_alignment");
  }

  _LIBCPP_HIDE_FROM_ABI atomic_ref(const atomic_ref&) noexcept = default;

  _LIBCPP_HIDE_FROM_ABI _Tp operator=(_Tp __desired) const noexcept { return __base::operator=(__desired); }

  atomic_ref& operator=(const atomic_ref&) = delete;
};

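// Partial specialization for pointer types: difference_type is ptrdiff_t and
// fetch_add/fetch_sub perform pointer arithmetic on the referenced pointer.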
template <class _Tp>
struct atomic_ref<_Tp*> : public __atomic_ref_base<_Tp*> {
  using __base = __atomic_ref_base<_Tp*>;

  using difference_type = ptrdiff_t;

  _LIBCPP_HIDE_FROM_ABI _Tp* fetch_add(ptrdiff_t __arg, memory_order __order = memory_order_seq_cst) const noexcept {
    return __cxx_atomic_fetch_add(&this->__a_, __arg, __order);
  }
  _LIBCPP_HIDE_FROM_ABI _Tp* fetch_sub(ptrdiff_t __arg, memory_order __order = memory_order_seq_cst) const noexcept {
    return __cxx_atomic_fetch_sub(&this->__a_, __arg, __order);
  }

  _LIBCPP_HIDE_FROM_ABI _Tp* operator++(int) const noexcept { return fetch_add(1); }
  _LIBCPP_HIDE_FROM_ABI _Tp* operator--(int) const noexcept { return fetch_sub(1); }
  _LIBCPP_HIDE_FROM_ABI _Tp* operator++() const noexcept { return fetch_add(1) + 1; }
  _LIBCPP_HIDE_FROM_ABI _Tp* operator--() const noexcept { return fetch_sub(1) - 1; }
  _LIBCPP_HIDE_FROM_ABI _Tp* operator+=(ptrdiff_t __arg) const noexcept { return fetch_add(__arg) + __arg; }
  _LIBCPP_HIDE_FROM_ABI _Tp* operator-=(ptrdiff_t __arg) const noexcept { return fetch_sub(__arg) - __arg; }

  _LIBCPP_HIDE_FROM_ABI explicit atomic_ref(_Tp*& __ptr) : __base(__ptr) {}

  _LIBCPP_HIDE_FROM_ABI _Tp* operator=(_Tp* __desired) const noexcept { return __base::operator=(__desired); }

  atomic_ref& operator=(const atomic_ref&) = delete;
};

#endif // _LIBCPP_STD_VER >= 20

_LIBCPP_END_NAMESPACE_STD

_LIBCPP_POP_MACROS

#endif // _LIBCPP___ATOMIC_ATOMIC_REF_H