23 #include "precompiled.h"
// Configure the process's floating-point state (per its name).
// NOTE(review): only the signature is visible in this chunk; the body lies
// outside this view — confirm against the full file before relying on it.
void cpu_ConfigureFloatingPoint()
43 typedef volatile __int64* P64;
46 bool cpu_CAS(
volatile intptr_t* location, intptr_t expected, intptr_t newValue)
48 const intptr_t initial = _InterlockedCompareExchange64((P64)location, newValue, expected);
49 return initial == expected;
54 const i64 initial = _InterlockedCompareExchange64((P64)location, newValue, expected);
55 return initial == expected;
58 intptr_t
cpu_AtomicAdd(
volatile intptr_t* location, intptr_t increment)
60 return _InterlockedExchangeAdd64((P64)location, increment);
65 #include <libkern/OSAtomic.h>
67 intptr_t
cpu_AtomicAdd(
volatile intptr_t* location, intptr_t increment)
70 return OSAtomicAdd64Barrier(increment, (
volatile int64_t*)location);
73 bool cpu_CAS(
volatile intptr_t* location, intptr_t expected, intptr_t newValue)
75 cassert(
sizeof(intptr_t) ==
sizeof(
void*));
76 return OSAtomicCompareAndSwapPtrBarrier((
void*)expected, (
void*)newValue, (
void*
volatile*)location);
81 return OSAtomicCompareAndSwap64Barrier(expected, newValue, location);
// Atomically add a signed `increment` to *location (GCC back-end).
// Returns the variable's PREVIOUS value; the __sync builtin also acts as
// a full memory barrier.
intptr_t cpu_AtomicAdd(volatile intptr_t* location, intptr_t increment)
{
	const intptr_t previous = __sync_fetch_and_add(location, increment);
	return previous;
}
// Atomic compare-and-swap on a pointer-sized variable (GCC back-end).
// If *location holds `expected`, store `newValue` and return true;
// otherwise leave it untouched and return false. Full barrier semantics.
bool cpu_CAS(volatile intptr_t* location, intptr_t expected, intptr_t newValue)
{
	return __sync_bool_compare_and_swap(location, expected, newValue);
}
98 return __sync_bool_compare_and_swap(location, expected, newValue);
intptr_t cpu_AtomicAdd(volatile intptr_t *location, intptr_t increment)
Atomically add a signed value to a variable, without the possibility of interference from other threads/CPUs. Returns the variable's previous (pre-addition) value.
bool cpu_CAS64(volatile i64 *location, i64 expected, i64 newValue)
bool cpu_CAS(volatile intptr_t *location, intptr_t expected, intptr_t newValue)
Atomic "compare and swap": if *location equals expected, store newValue there and return true; otherwise leave *location unchanged and return false.
#define cassert(expr)
Compile-time assertion: causes a compile error if expr evaluates to false.