WasiAtomic.h (5591B)
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set ts=8 sts=2 et sw=2 tw=80: */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef mozilla_WasiAtomic_h
#define mozilla_WasiAtomic_h

#include <cstddef>  // For _LIBCPP_VERSION and ptrdiff_t

// Clang >= 14 supports <atomic> for wasm targets.
#if _LIBCPP_VERSION >= 14000
#  include <atomic>
#else

#  include <cstdint>

// WASI doesn't support <atomic> and we use it as single-threaded for now.
// This is a stub implementation of std atomics to build WASI port of SM.
//
// Every memory_order parameter is accepted but ignored: with a single
// thread there is no ordering to enforce, so each operation is a plain
// read and/or write of value_.

namespace std {
enum memory_order {
  relaxed,
  consume,  // load-consume
  acquire,  // load-acquire
  release,  // store-release
  acq_rel,  // store-release load-acquire
  seq_cst   // acq_rel plus a single total order over all seq_cst operations
};

inline constexpr auto memory_order_relaxed = memory_order::relaxed;
inline constexpr auto memory_order_consume = memory_order::consume;
inline constexpr auto memory_order_acquire = memory_order::acquire;
inline constexpr auto memory_order_release = memory_order::release;
inline constexpr auto memory_order_acq_rel = memory_order::acq_rel;
inline constexpr auto memory_order_seq_cst = memory_order::seq_cst;

// Single-threaded stand-in for std::atomic<T>.
template <class T>
struct atomic {
  using value_type = T;
  value_type value_;

  atomic() noexcept = default;
  constexpr atomic(T desired) noexcept : value_{desired} {}

  atomic(const atomic&) = delete;
  atomic& operator=(const atomic&) = delete;
  atomic& operator=(const atomic&) volatile = delete;
  ~atomic() noexcept = default;

  T load(memory_order m = memory_order_seq_cst) const volatile noexcept {
    return value_;
  }

  void store(T desired,
             memory_order m = memory_order_seq_cst) volatile noexcept {
    value_ = desired;
  }

  T operator=(T desired) volatile noexcept { return value_ = desired; }

  T exchange(T desired,
             memory_order m = memory_order_seq_cst) volatile noexcept {
    T previous = value_;
    value_ = desired;
    return previous;
  }

  // Single-threaded CAS: succeeds iff the stored value equals |expected|,
  // in which case |desired| is stored. On failure, |expected| is updated to
  // the stored value, per the std::atomic contract. There are no other
  // threads, so spurious failure never happens and the weak and strong
  // forms behave identically.
  bool compare_exchange_weak(T& expected, T desired, memory_order,
                             memory_order) volatile noexcept {
    if (value_ == expected) {
      value_ = desired;
      return true;
    }
    expected = value_;
    return false;
  }

  bool compare_exchange_weak(
      T& expected, T desired,
      memory_order m = memory_order_seq_cst) volatile noexcept {
    return compare_exchange_weak(expected, desired, m, m);
  }

  bool compare_exchange_strong(T& expected, T desired, memory_order,
                               memory_order) volatile noexcept {
    if (value_ == expected) {
      value_ = desired;
      return true;
    }
    expected = value_;
    return false;
  }

  bool compare_exchange_strong(
      T& expected, T desired,
      memory_order m = memory_order_seq_cst) volatile noexcept {
    return compare_exchange_strong(expected, desired, m, m);
  }

  // Read-modify-write operations. Each returns the value held *before* the
  // modification, as std::atomic does.
  T fetch_add(T arg, memory_order m = memory_order_seq_cst) volatile noexcept {
    T previous = value_;
    value_ = value_ + arg;
    return previous;
  }

  T fetch_sub(T arg, memory_order m = memory_order_seq_cst) volatile noexcept {
    T previous = value_;
    value_ = value_ - arg;
    return previous;
  }

  T fetch_or(T arg, memory_order m = memory_order_seq_cst) volatile noexcept {
    T previous = value_;
    value_ = value_ | arg;
    return previous;
  }

  T fetch_xor(T arg, memory_order m = memory_order_seq_cst) volatile noexcept {
    T previous = value_;
    value_ = value_ ^ arg;
    return previous;
  }

  T fetch_and(T arg, memory_order m = memory_order_seq_cst) volatile noexcept {
    T previous = value_;
    value_ = value_ & arg;
    return previous;
  }
};

// Single-threaded stand-in for std::atomic<T*>. fetch_add/fetch_sub take a
// ptrdiff_t and perform pointer arithmetic, as in the standard.
template <class T>
struct atomic<T*> {
  using value_type = T*;
  using difference_type = ptrdiff_t;

  value_type value_;

  atomic() noexcept = default;
  constexpr atomic(T* desired) noexcept : value_{desired} {}

  atomic(const atomic&) = delete;
  atomic& operator=(const atomic&) = delete;
  atomic& operator=(const atomic&) volatile = delete;
  ~atomic() noexcept = default;

  T* load(memory_order m = memory_order_seq_cst) const volatile noexcept {
    return value_;
  }

  void store(T* desired,
             memory_order m = memory_order_seq_cst) volatile noexcept {
    value_ = desired;
  }

  T* operator=(T* other) volatile noexcept { return value_ = other; }

  T* exchange(T* desired,
              memory_order m = memory_order_seq_cst) volatile noexcept {
    T* previous = value_;
    value_ = desired;
    return previous;
  }

  // Same single-threaded CAS semantics as the primary template; see the
  // comment there.
  bool compare_exchange_weak(T*& expected, T* desired, memory_order s,
                             memory_order f) volatile noexcept {
    if (value_ == expected) {
      value_ = desired;
      return true;
    }
    expected = value_;
    return false;
  }

  bool compare_exchange_weak(
      T*& expected, T* desired,
      memory_order m = memory_order_seq_cst) volatile noexcept {
    return compare_exchange_weak(expected, desired, m, m);
  }

  bool compare_exchange_strong(T*& expected, T* desired, memory_order s,
                               memory_order f) volatile noexcept {
    if (value_ == expected) {
      value_ = desired;
      return true;
    }
    expected = value_;
    return false;
  }

  bool compare_exchange_strong(
      T*& expected, T* desired,
      memory_order m = memory_order_seq_cst) volatile noexcept {
    return compare_exchange_strong(expected, desired, m, m);
  }

  T* fetch_add(ptrdiff_t arg,
               memory_order m = memory_order_seq_cst) volatile noexcept {
    T* previous = value_;
    value_ = value_ + arg;
    return previous;
  }

  T* fetch_sub(ptrdiff_t arg,
               memory_order m = memory_order_seq_cst) volatile noexcept {
    T* previous = value_;
    value_ = value_ - arg;
    return previous;
  }
};

using atomic_uint8_t = atomic<uint8_t>;
using atomic_uint16_t = atomic<uint16_t>;
using atomic_uint32_t = atomic<uint32_t>;
using atomic_uint64_t = atomic<uint64_t>;

}  // namespace std

#endif

#endif  // mozilla_WasiAtomic_h