27 #include "precompiled.h"
39 typedef volatile long* P32;
40 typedef volatile __int64* P64;
43 bool cpu_CAS(
volatile intptr_t* location, intptr_t expected, intptr_t newValue)
45 const intptr_t initial = _InterlockedCompareExchange((P32)location, newValue, expected);
46 return initial == expected;
51 const i64 initial = _InterlockedCompareExchange64((P64)location, newValue, expected);
52 return initial == expected;
55 intptr_t
cpu_AtomicAdd(
volatile intptr_t* location, intptr_t increment)
57 return _InterlockedExchangeAdd((P32)location, increment);
62 #include <libkern/OSAtomic.h>
64 intptr_t
cpu_AtomicAdd(
volatile intptr_t* location, intptr_t increment)
66 cassert(
sizeof(intptr_t) ==
sizeof(int32_t));
67 return OSAtomicAdd32Barrier(increment, (
volatile int32_t*)location);
70 bool cpu_CAS(
volatile intptr_t* location, intptr_t expected, intptr_t newValue)
72 cassert(
sizeof(intptr_t) ==
sizeof(
void*));
73 return OSAtomicCompareAndSwapPtrBarrier((
void*)expected, (
void*)newValue, (
void*
volatile*)location);
78 return OSAtomicCompareAndSwap64Barrier(expected, newValue, location);
// Atomically add a signed value to *location (GCC/Clang builtin path).
//
// @return the value of *location before the addition (fetch-and-add).
intptr_t cpu_AtomicAdd(volatile intptr_t* location, intptr_t increment)
{
	// __sync_fetch_and_add issues a full barrier and yields the old value.
	const intptr_t previous = __sync_fetch_and_add(location, increment);
	return previous;
}
// Atomic compare-and-swap (GCC/Clang builtin path, full barrier).
// If *location still holds `expected`, replace it with `newValue`.
//
// @return true iff the swap took place.
bool cpu_CAS(volatile intptr_t* location, intptr_t expected, intptr_t newValue)
{
	return __sync_bool_compare_and_swap(location, expected, newValue);
}
95 return __sync_bool_compare_and_swap(location, expected, newValue);
intptr_t cpu_AtomicAdd(volatile intptr_t *location, intptr_t increment)
add a signed value to a variable without the possibility of interference from other threads/CPUs; returns the previous value of *location.
bool cpu_CAS64(volatile i64 *location, i64 expected, i64 newValue)
bool cpu_CAS(volatile intptr_t *location, intptr_t expected, intptr_t newValue)
atomic "compare and swap": if *location equals expected, atomically replace it with newValue; returns whether the swap took place.
#define cassert(expr)
Compile-time assertion.