#include "static_assert.h"

#ifdef __GNUC__
/* Cast away const-ness of a pointer without changing its type. */
#define CONST_CAST(T, V) __extension__({ \
    const T _tmp = (V); \
    (T)_tmp; \
})
#else
#define CONST_CAST(T, V) ((T)(V))
#endif

/* Atomics using C11 */
#ifdef HAVE_C11_ATOMIC

#include <stdatomic.h>

#define almemory_order memory_order
#define almemory_order_relaxed memory_order_relaxed
#define almemory_order_consume memory_order_consume
#define almemory_order_acquire memory_order_acquire
#define almemory_order_release memory_order_release
#define almemory_order_acq_rel memory_order_acq_rel
#define almemory_order_seq_cst memory_order_seq_cst

#define ATOMIC(T) T _Atomic
#define ATOMIC_FLAG atomic_flag

#define ATOMIC_INIT atomic_init
#define ATOMIC_INIT_STATIC ATOMIC_VAR_INIT

#define ATOMIC_LOAD atomic_load_explicit
#define ATOMIC_STORE atomic_store_explicit

#define ATOMIC_ADD atomic_fetch_add_explicit
#define ATOMIC_SUB atomic_fetch_sub_explicit

#define ATOMIC_EXCHANGE atomic_exchange_explicit
#define ATOMIC_COMPARE_EXCHANGE_STRONG atomic_compare_exchange_strong_explicit
#define ATOMIC_COMPARE_EXCHANGE_WEAK atomic_compare_exchange_weak_explicit

#define ATOMIC_FLAG_TEST_AND_SET atomic_flag_test_and_set_explicit
#define ATOMIC_FLAG_CLEAR atomic_flag_clear_explicit

#define ATOMIC_THREAD_FENCE atomic_thread_fence

/* Atomics using GCC intrinsics */
#elif defined(__GNUC__) && (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 1)) && !defined(__QNXNTO__)

enum almemory_order {
    almemory_order_relaxed,
    almemory_order_consume,
    almemory_order_acquire,
    almemory_order_release,
    almemory_order_acq_rel,
    almemory_order_seq_cst
};
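/* Note on the non-C11 backends below (illustrative, not part of the API):
 * ATOMIC(T) wraps the value in a struct with a single volatile member, e.g.
 *
 *     ATOMIC(int) count;   // roughly: struct { int volatile value; } count;
 *
 * so plain assignments like `count = 5;` fail to compile and every access has
 * to go through the ATOMIC_* macros, which insert the needed barriers.
 */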
#define ATOMIC(T) struct { T volatile value; }
#define ATOMIC_FLAG ATOMIC(int)

#define ATOMIC_INIT(_val, _newval) do { (_val)->value = (_newval); } while(0)
#define ATOMIC_INIT_STATIC(_newval) {(_newval)}
#define ATOMIC_FLAG_INIT ATOMIC_INIT_STATIC(0)

#define ATOMIC_LOAD(_val, _MO) __extension__({ \
    __typeof((_val)->value) _r = (_val)->value; \
    __asm__ __volatile__("" ::: "memory"); \
    _r; \
})
#define ATOMIC_STORE(_val, _newval, _MO) do { \
    __asm__ __volatile__("" ::: "memory"); \
    (_val)->value = (_newval); \
} while(0)

#define ATOMIC_ADD(_val, _incr, _MO) __sync_fetch_and_add(&(_val)->value, (_incr))
#define ATOMIC_SUB(_val, _decr, _MO) __sync_fetch_and_sub(&(_val)->value, (_decr))

#define ATOMIC_EXCHANGE(_val, _newval, _MO) __extension__({ \
    __asm__ __volatile__("" ::: "memory"); \
    __sync_lock_test_and_set(&(_val)->value, (_newval)); \
})
#define ATOMIC_COMPARE_EXCHANGE_STRONG(_val, _oldval, _newval, _MO1, _MO2) __extension__({ \
    __typeof(*(_oldval)) _o = *(_oldval); \
    *(_oldval) = __sync_val_compare_and_swap(&(_val)->value, _o, (_newval)); \
    *(_oldval) == _o; \
})

#define ATOMIC_FLAG_TEST_AND_SET(_val, _MO) __extension__({ \
    __asm__ __volatile__("" ::: "memory"); \
    __sync_lock_test_and_set(&(_val)->value, 1); \
})
#define ATOMIC_FLAG_CLEAR(_val, _MO) __extension__({ \
    __sync_lock_release(&(_val)->value); \
    __asm__ __volatile__("" ::: "memory"); \
})

#define ATOMIC_THREAD_FENCE(order) do { \
    enum { must_be_constant = (order) }; \
    const int _o = must_be_constant; \
    if(_o > almemory_order_relaxed) \
        __asm__ __volatile__("" ::: "memory"); \
} while(0)

/* Atomics using x86/x86-64 GCC inline assembly */
#elif defined(__GNUC__) && (defined(__i386__) || defined(__x86_64__))

#define WRAP_ADD(S, ret, dest, incr) __asm__ __volatile__( \
    "lock; xadd"S" %0,(%1)" \
    : "=r" (ret) \
    : "r" (dest), "0" (incr) \
    : "memory" \
)
#define WRAP_SUB(S, ret, dest, decr) __asm__ __volatile__( \
    "lock; xadd"S" %0,(%1)" \
    : "=r" (ret) \
    : "r" (dest), "0" (-(decr)) \
    : "memory" \
)

#define WRAP_XCHG(S, ret, dest, newval) __asm__ __volatile__( \
    "lock; xchg"S" %0,(%1)" \
    : "=r" (ret) \
    : "r" (dest), "0" (newval) \
    : "memory" \
)
#define WRAP_CMPXCHG(S, ret, dest, oldval, newval) __asm__ __volatile__( \
    "lock; cmpxchg"S" %2,(%1)" \
    : "=a" (ret) \
    : "r" (dest), "r" (newval), "0" (oldval) \
    : "memory" \
)

enum almemory_order {
    almemory_order_relaxed,
    almemory_order_consume,
    almemory_order_acquire,
    almemory_order_release,
    almemory_order_acq_rel,
    almemory_order_seq_cst
};
#define ATOMIC(T) struct { T volatile value; }

#define ATOMIC_INIT(_val, _newval) do { (_val)->value = (_newval); } while(0)
#define ATOMIC_INIT_STATIC(_newval) {(_newval)}

#define ATOMIC_LOAD(_val, _MO) __extension__({ \
    __typeof((_val)->value) _r = (_val)->value; \
    __asm__ __volatile__("" ::: "memory"); \
    _r; \
})
#define ATOMIC_STORE(_val, _newval, _MO) do { \
    __asm__ __volatile__("" ::: "memory"); \
    (_val)->value = (_newval); \
} while(0)

#define ATOMIC_ADD(_val, _incr, _MO) __extension__({ \
    static_assert(sizeof((_val)->value)==4 || sizeof((_val)->value)==8, "Unsupported size!"); \
    __typeof((_val)->value) _r; \
    if(sizeof((_val)->value) == 4) WRAP_ADD("l", _r, &(_val)->value, _incr); \
    else if(sizeof((_val)->value) == 8) WRAP_ADD("q", _r, &(_val)->value, _incr); \
    _r; \
})
#define ATOMIC_SUB(_val, _decr, _MO) __extension__({ \
    static_assert(sizeof((_val)->value)==4 || sizeof((_val)->value)==8, "Unsupported size!"); \
    __typeof((_val)->value) _r; \
    if(sizeof((_val)->value) == 4) WRAP_SUB("l", _r, &(_val)->value, _decr); \
    else if(sizeof((_val)->value) == 8) WRAP_SUB("q", _r, &(_val)->value, _decr); \
    _r; \
})

#define ATOMIC_EXCHANGE(_val, _newval, _MO) __extension__({ \
    __typeof((_val)->value) _r; \
    if(sizeof((_val)->value) == 4) WRAP_XCHG("l", _r, &(_val)->value, (_newval)); \
    else if(sizeof((_val)->value) == 8) WRAP_XCHG("q", _r, &(_val)->value, (_newval)); \
    _r; \
})
#define ATOMIC_COMPARE_EXCHANGE_STRONG(_val, _oldval, _newval, _MO1, _MO2) __extension__({ \
    __typeof(*(_oldval)) _old = *(_oldval); \
    if(sizeof((_val)->value) == 4) WRAP_CMPXCHG("l", *(_oldval), &(_val)->value, _old, (_newval)); \
    else if(sizeof((_val)->value) == 8) WRAP_CMPXCHG("q", *(_oldval), &(_val)->value, _old, (_newval)); \
    *(_oldval) == _old; \
})

#define ATOMIC_EXCHANGE_PTR(_val, _newval, _MO) __extension__({ \
    void *_r; \
    if(sizeof(void*) == 4) WRAP_XCHG("l", _r, &(_val)->value, (_newval)); \
    else if(sizeof(void*) == 8) WRAP_XCHG("q", _r, &(_val)->value, (_newval)); \
    _r; \
})
#define ATOMIC_COMPARE_EXCHANGE_PTR_STRONG(_val, _oldval, _newval, _MO1, _MO2) __extension__({ \
    void *_old = *(_oldval); \
    if(sizeof(void*) == 4) WRAP_CMPXCHG("l", *(_oldval), &(_val)->value, _old, (_newval)); \
    else if(sizeof(void*) == 8) WRAP_CMPXCHG("q", *(_oldval), &(_val)->value, _old, (_newval)); \
    *(_oldval) == _old; \
})

#define ATOMIC_THREAD_FENCE(order) do { \
    enum { must_be_constant = (order) }; \
    const int _o = must_be_constant; \
    if(_o > almemory_order_relaxed) \
        __asm__ __volatile__("" ::: "memory"); \
} while(0)

/* Atomics using Windows Interlocked functions */
#elif defined(_WIN32)

#define WIN32_LEAN_AND_MEAN
#include <windows.h>

inline LONG AtomicAdd32(volatile LONG *dest, LONG incr)
{
    return InterlockedExchangeAdd(dest, incr);
}
inline LONGLONG AtomicAdd64(volatile LONGLONG *dest, LONGLONG incr)
{
    return InterlockedExchangeAdd64(dest, incr);
}
inline LONG AtomicSub32(volatile LONG *dest, LONG decr)
{
    return InterlockedExchangeAdd(dest, -decr);
}
inline LONGLONG AtomicSub64(volatile LONGLONG *dest, LONGLONG decr)
{
    return InterlockedExchangeAdd64(dest, -decr);
}
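/* Illustrative note (not in the original header): like the GCC
 * __sync_fetch_and_* intrinsics, InterlockedExchangeAdd/-Add64 return the
 * value the destination held *before* the addition, so these wrappers behave
 * as fetch-then-add. A rough sketch, assuming a LONG counter:
 *
 *     volatile LONG n = 5;
 *     LONG prev = AtomicAdd32(&n, 3);   // prev == 5, n is now 8
 *     prev = AtomicSub32(&n, 2);        // prev == 8, n is now 6
 */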
inline LONG AtomicSwap32(volatile LONG *dest, LONG newval)
{
    return InterlockedExchange(dest, newval);
}
inline LONGLONG AtomicSwap64(volatile LONGLONG *dest, LONGLONG newval)
{
    return InterlockedExchange64(dest, newval);
}
inline void *AtomicSwapPtr(void *volatile *dest, void *newval)
{
    return InterlockedExchangePointer(dest, newval);
}
inline bool CompareAndSwap32(volatile LONG *dest, LONG newval, LONG *oldval)
{
    LONG old = *oldval;
    *oldval = InterlockedCompareExchange(dest, newval, *oldval);
    return old == *oldval;
}
inline bool CompareAndSwap64(volatile LONGLONG *dest, LONGLONG newval, LONGLONG *oldval)
{
    LONGLONG old = *oldval;
    *oldval = InterlockedCompareExchange64(dest, newval, *oldval);
    return old == *oldval;
}
inline bool CompareAndSwapPtr(void *volatile *dest, void *newval, void **oldval)
{
    void *old = *oldval;
    *oldval = InterlockedCompareExchangePointer(dest, newval, *oldval);
    return old == *oldval;
}
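/* Illustrative note (not in the original header): these wrappers follow the
 * C11 compare-exchange convention. They return true when *dest matched
 * *oldval and was replaced with newval; on failure they return false and
 * write the value actually found into *oldval, so a retry loop can reuse it:
 *
 *     volatile LONG v = 10;
 *     LONG expected = 10;
 *     if(CompareAndSwap32(&v, 42, &expected))
 *         ;  // success: v is now 42
 *     else
 *         ;  // failure: expected now holds the current value of v
 */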
#define WRAP_ADDSUB(T, _func, _ptr, _amnt) _func((T volatile*)(_ptr), (_amnt))
#define WRAP_XCHG(T, _func, _ptr, _newval) _func((T volatile*)(_ptr), (_newval))
#define WRAP_CMPXCHG(T, _func, _ptr, _newval, _oldval) _func((T volatile*)(_ptr), (_newval), (T*)(_oldval))

enum almemory_order {
    almemory_order_relaxed,
    almemory_order_consume,
    almemory_order_acquire,
    almemory_order_release,
    almemory_order_acq_rel,
    almemory_order_seq_cst
};
#define ATOMIC(T) struct { T volatile value; }

#define ATOMIC_INIT(_val, _newval) do { (_val)->value = (_newval); } while(0)
#define ATOMIC_INIT_STATIC(_newval) {(_newval)}

#define ATOMIC_LOAD(_val, _MO) ((_val)->value)
#define ATOMIC_STORE(_val, _newval, _MO) do { \
    (_val)->value = (_newval); \
} while(0)

/* Declared but never defined, so a mis-sized atomic fails at link time. */
int _al_invalid_atomic_size();
void *_al_invalid_atomic_ptr_size();

#define ATOMIC_ADD(_val, _incr, _MO) \
    ((sizeof((_val)->value)==4) ? WRAP_ADDSUB(LONG, AtomicAdd32, &(_val)->value, (_incr)) : \
     (sizeof((_val)->value)==8) ? WRAP_ADDSUB(LONGLONG, AtomicAdd64, &(_val)->value, (_incr)) : \
     _al_invalid_atomic_size())
#define ATOMIC_SUB(_val, _decr, _MO) \
    ((sizeof((_val)->value)==4) ? WRAP_ADDSUB(LONG, AtomicSub32, &(_val)->value, (_decr)) : \
     (sizeof((_val)->value)==8) ? WRAP_ADDSUB(LONGLONG, AtomicSub64, &(_val)->value, (_decr)) : \
     _al_invalid_atomic_size())

#define ATOMIC_EXCHANGE(_val, _newval, _MO) \
    ((sizeof((_val)->value)==4) ? WRAP_XCHG(LONG, AtomicSwap32, &(_val)->value, (_newval)) : \
     (sizeof((_val)->value)==8) ? WRAP_XCHG(LONGLONG, AtomicSwap64, &(_val)->value, (_newval)) : \
     (LONG)_al_invalid_atomic_size())
#define ATOMIC_COMPARE_EXCHANGE_STRONG(_val, _oldval, _newval, _MO1, _MO2) \
    ((sizeof((_val)->value)==4) ? WRAP_CMPXCHG(LONG, CompareAndSwap32, &(_val)->value, (_newval), (_oldval)) : \
     (sizeof((_val)->value)==8) ? WRAP_CMPXCHG(LONGLONG, CompareAndSwap64, &(_val)->value, (_newval), (_oldval)) : \
     (bool)_al_invalid_atomic_size())

#define ATOMIC_EXCHANGE_PTR(_val, _newval, _MO) \
    ((sizeof((_val)->value)==sizeof(void*)) ? AtomicSwapPtr((void*volatile*)&(_val)->value, (_newval)) : \
     _al_invalid_atomic_ptr_size())
#define ATOMIC_COMPARE_EXCHANGE_PTR_STRONG(_val, _oldval, _newval, _MO1, _MO2) \
    ((sizeof((_val)->value)==sizeof(void*)) ? CompareAndSwapPtr((void*volatile*)&(_val)->value, (_newval), (void**)(_oldval)) : \
     (bool)_al_invalid_atomic_size())

#define ATOMIC_THREAD_FENCE(order) do { \
    enum { must_be_constant = (order) }; \
    const int _o = must_be_constant; \
    if(_o > almemory_order_relaxed) \
        _ReadWriteBarrier(); \
} while(0)

#else

#error "No atomic functions available on this platform!"

#define ATOMIC(T) T

#define ATOMIC_INIT(_val, _newval) ((void)0)
#define ATOMIC_INIT_STATIC(_newval) (0)

#define ATOMIC_LOAD(...) (0)
#define ATOMIC_STORE(...) ((void)0)

#define ATOMIC_ADD(...) (0)
#define ATOMIC_SUB(...) (0)

#define ATOMIC_EXCHANGE(...) (0)
#define ATOMIC_COMPARE_EXCHANGE_STRONG(...) (0)

#define ATOMIC_THREAD_FENCE(...) ((void)0)
#endif

/* If no pointer variants are provided, the regular ones can handle pointers. */
#ifndef ATOMIC_EXCHANGE_PTR
#define ATOMIC_EXCHANGE_PTR ATOMIC_EXCHANGE
#define ATOMIC_COMPARE_EXCHANGE_PTR_STRONG ATOMIC_COMPARE_EXCHANGE_STRONG
#define ATOMIC_COMPARE_EXCHANGE_PTR_WEAK ATOMIC_COMPARE_EXCHANGE_WEAK
#endif

/* If no weak cmpxchg is provided, substitute the strong one. */
#ifndef ATOMIC_COMPARE_EXCHANGE_WEAK
#define ATOMIC_COMPARE_EXCHANGE_WEAK ATOMIC_COMPARE_EXCHANGE_STRONG
#endif
#ifndef ATOMIC_COMPARE_EXCHANGE_PTR_WEAK
#define ATOMIC_COMPARE_EXCHANGE_PTR_WEAK ATOMIC_COMPARE_EXCHANGE_PTR_STRONG
#endif

/* If no ATOMIC_FLAG is defined, simulate one with an atomic int. */
#ifndef ATOMIC_FLAG
#define ATOMIC_FLAG ATOMIC(int)
#define ATOMIC_FLAG_INIT ATOMIC_INIT_STATIC(0)
#define ATOMIC_FLAG_TEST_AND_SET(_val, _MO) ATOMIC_EXCHANGE(_val, 1, _MO)
#define ATOMIC_FLAG_CLEAR(_val, _MO) ATOMIC_STORE(_val, 0, _MO)
#endif

#define ATOMIC_LOAD_SEQ(_val) ATOMIC_LOAD(_val, almemory_order_seq_cst)
#define ATOMIC_STORE_SEQ(_val, _newval) ATOMIC_STORE(_val, _newval, almemory_order_seq_cst)

#define ATOMIC_ADD_SEQ(_val, _incr) ATOMIC_ADD(_val, _incr, almemory_order_seq_cst)
#define ATOMIC_SUB_SEQ(_val, _decr) ATOMIC_SUB(_val, _decr, almemory_order_seq_cst)

#define ATOMIC_EXCHANGE_SEQ(_val, _newval) ATOMIC_EXCHANGE(_val, _newval, almemory_order_seq_cst)
#define ATOMIC_COMPARE_EXCHANGE_STRONG_SEQ(_val, _oldval, _newval) \
    ATOMIC_COMPARE_EXCHANGE_STRONG(_val, _oldval, _newval, almemory_order_seq_cst, almemory_order_seq_cst)
#define ATOMIC_COMPARE_EXCHANGE_WEAK_SEQ(_val, _oldval, _newval) \
    ATOMIC_COMPARE_EXCHANGE_WEAK(_val, _oldval, _newval, almemory_order_seq_cst, almemory_order_seq_cst)

#define ATOMIC_EXCHANGE_PTR_SEQ(_val, _newval) ATOMIC_EXCHANGE_PTR(_val, _newval, almemory_order_seq_cst)
#define ATOMIC_COMPARE_EXCHANGE_PTR_STRONG_SEQ(_val, _oldval, _newval) \
    ATOMIC_COMPARE_EXCHANGE_PTR_STRONG(_val, _oldval, _newval, almemory_order_seq_cst, almemory_order_seq_cst)
#define ATOMIC_COMPARE_EXCHANGE_PTR_WEAK_SEQ(_val, _oldval, _newval) \
    ATOMIC_COMPARE_EXCHANGE_PTR_WEAK(_val, _oldval, _newval, almemory_order_seq_cst, almemory_order_seq_cst)
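/* Illustrative usage sketch (not part of this header): with the wrappers
 * above, an atomic counter is declared, initialized and updated the same way
 * on every backend. The names gCount and do_init are hypothetical:
 *
 *     static ATOMIC(int) gCount = ATOMIC_INIT_STATIC(0);
 *
 *     void do_init(void)
 *     {
 *         ATOMIC_INIT(&gCount, 0);                  // runtime (re)init
 *         ATOMIC_STORE_SEQ(&gCount, 1);             // seq_cst store
 *         int seen = ATOMIC_LOAD(&gCount, almemory_order_acquire);
 *         int prev = ATOMIC_ADD(&gCount, 1, almemory_order_acq_rel);
 *         (void)seen; (void)prev;
 *     }
 */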
typedef unsigned int uint;
typedef ATOMIC(uint) RefCount;
inline void InitRef(RefCount *ptr, uint value)
{ ATOMIC_INIT(ptr, value); }
inline uint ReadRef(RefCount *ptr)
{ return ATOMIC_LOAD(ptr, almemory_order_acquire); }
inline uint IncrementRef(RefCount *ptr)
{ return ATOMIC_ADD(ptr, 1, almemory_order_acq_rel)+1; }
inline uint DecrementRef(RefCount *ptr)
{ return ATOMIC_SUB(ptr, 1, almemory_order_acq_rel)-1; }
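/* Illustrative usage sketch (not part of this header), showing the intended
 * pattern for the helpers above; `obj`, Thing and free_thing are hypothetical:
 *
 *     typedef struct Thing { RefCount ref; } Thing;
 *
 *     // on creation:         InitRef(&obj->ref, 1);
 *     // on taking a ref:     IncrementRef(&obj->ref);
 *     // on releasing a ref:  if(DecrementRef(&obj->ref) == 0) free_thing(obj);
 *
 * IncrementRef/DecrementRef return the value *after* the update, which is why
 * the result is compared against 0 to detect the last release.
 */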
/* Atomically replace the head of a singly-linked list, retrying until the new
 * entry is linked in without losing concurrent insertions. */
#define ATOMIC_REPLACE_HEAD(T, _head, _entry) do { \
    T _first = ATOMIC_LOAD(_head, almemory_order_acquire); \
    do { \
        ATOMIC_STORE(&(_entry)->next, _first, almemory_order_relaxed); \
    } while(ATOMIC_COMPARE_EXCHANGE_PTR_WEAK(_head, &_first, _entry, \
            almemory_order_acq_rel, almemory_order_acquire) == 0); \
} while(0)
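/* Illustrative usage sketch (not part of this header): pushing a node onto a
 * lock-free list with the macro above. The node only needs an atomic `next`
 * pointer; ListNode, gListHead and push_node are hypothetical names:
 *
 *     typedef struct ListNode {
 *         ATOMIC(struct ListNode*) next;
 *         int data;
 *     } ListNode;
 *
 *     static ATOMIC(ListNode*) gListHead = ATOMIC_INIT_STATIC(NULL);
 *
 *     void push_node(ListNode *node)
 *     { ATOMIC_REPLACE_HEAD(ListNode*, &gListHead, node); }
 */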