/*
 * Copyright 2013-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
#include <atomic>
#include <cstdint>
#include <cstring>
#include <type_traits>

#include <folly/ConstexprMath.h>
#include <folly/Traits.h>
#include <folly/synchronization/detail/AtomicUtils.h>
namespace folly {
namespace detail {

/// Storage selector for AtomicStruct: maps ceil(log2(sizeof(T))) to the
/// smallest unsigned integer type wide enough to hold T (1, 2, 4, or 8
/// bytes).  The primary template is left undefined so that types larger
/// than 8 bytes fail at compile time.
template <int N>
struct AtomicStructRaw;

template <>
struct AtomicStructRaw<0> {
  using type = uint8_t;
};

template <>
struct AtomicStructRaw<1> {
  using type = uint16_t;
};

template <>
struct AtomicStructRaw<2> {
  using type = uint32_t;
};

template <>
struct AtomicStructRaw<3> {
  using type = uint64_t;
};

} // namespace detail
} // namespace folly
52 /// AtomicStruct<T> work like C++ atomics, but can be used on any POD
54 template <typename T, template <typename> class Atom = std::atomic>
57 using Raw = _t<detail::AtomicStructRaw<constexpr_log2_ceil(sizeof(T))>>;
59 static_assert(alignof(T) <= alignof(Raw), "underlying type is under-aligned");
60 static_assert(sizeof(T) <= sizeof(Raw), "underlying type is under-sized");
62 std::is_trivial<T>::value || folly::IsTriviallyCopyable<T>::value,
63 "target type must be trivially copyable");
67 static Raw encode(T v) noexcept {
68 // we expect the compiler to optimize away the memcpy, but without
69 // it we would violate strict aliasing rules
71 memcpy(&d, &v, sizeof(T));
75 static T decode(Raw d) noexcept {
77 memcpy(&v, &d, sizeof(T));
82 AtomicStruct() = default;
83 ~AtomicStruct() = default;
84 AtomicStruct(AtomicStruct<T> const &) = delete;
85 AtomicStruct<T>& operator= (AtomicStruct<T> const &) = delete;
87 constexpr /* implicit */ AtomicStruct(T v) noexcept : data(encode(v)) {}
89 bool is_lock_free() const noexcept {
90 return data.is_lock_free();
93 bool compare_exchange_strong(
96 std::memory_order mo = std::memory_order_seq_cst) noexcept {
97 return compare_exchange_strong(
98 v0, v1, mo, detail::default_failure_memory_order(mo));
100 bool compare_exchange_strong(
103 std::memory_order success,
104 std::memory_order failure) noexcept {
106 bool rv = data.compare_exchange_strong(d0, encode(v1), success, failure);
113 bool compare_exchange_weak(
116 std::memory_order mo = std::memory_order_seq_cst) noexcept {
117 return compare_exchange_weak(
118 v0, v1, mo, detail::default_failure_memory_order(mo));
120 bool compare_exchange_weak(
123 std::memory_order success,
124 std::memory_order failure) noexcept {
126 bool rv = data.compare_exchange_weak(d0, encode(v1), success, failure);
133 T exchange(T v, std::memory_order mo = std::memory_order_seq_cst) noexcept {
134 return decode(data.exchange(encode(v), mo));
137 /* implicit */ operator T () const noexcept {
141 T load(std::memory_order mo = std::memory_order_seq_cst) const noexcept {
142 return decode(data.load(mo));
145 T operator= (T v) noexcept {
146 return decode(data = encode(v));
149 void store(T v, std::memory_order mo = std::memory_order_seq_cst) noexcept {
150 data.store(encode(v), mo);
153 // std::atomic also provides volatile versions of all of the access
154 // methods. These are callable on volatile objects, and also can
155 // theoretically have different implementations than their non-volatile
156 // counterpart. If someone wants them here they can easily be added
157 // by duplicating the above code and the corresponding unit tests.