LIB_API intptr_t cpu_AtomicAdd(volatile intptr_t* location, intptr_t increment);
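A minimal usage sketch (not part of the header): a shared counter incremented from several threads. The include path and the name RecordEvent are hypothetical, and the assumption that cpu_AtomicAdd returns the variable's previous value (XADD-style) should be checked against the full documentation.

#include "lib/sysdep/cpu.h"	// assumed path to this header

static volatile intptr_t s_eventCount = 0;

void RecordEvent()
{
	// assumed to return the counter's value before the addition,
	// in the style of x86 XADD
	const intptr_t before = cpu_AtomicAdd(&s_eventCount, 1);
	(void)before;
}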
LIB_API bool cpu_CAS(volatile intptr_t* location, intptr_t expected, intptr_t newValue);
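A sketch of the usual CAS retry pattern, assuming the conventional semantics that cpu_CAS returns true only if *location still held expected and was overwritten with newValue. AtomicStoreMax is a hypothetical helper, not part of the library.

#include "lib/sysdep/cpu.h"	// assumed path

// lock-free "store maximum": raise *location to value unless it is
// already at least that large
void AtomicStoreMax(volatile intptr_t* location, intptr_t value)
{
	for(;;)
	{
		const intptr_t current = *location;
		if(current >= value)
			return;	// nothing to do
		if(cpu_CAS(location, current, value))
			return;	// swap succeeded
		// another thread modified *location in between; retry
	}
}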
// overload of cpu_CAS for other word-sized types (e.g. pointers);
// avoids error-prone casting in user code.
template<typename T>
inline bool cpu_CAS(volatile T* location, T expected, T new_value)
{
	return cpu_CAS((volatile intptr_t*)location, (intptr_t)expected, (intptr_t)new_value);
}
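For instance, the template overload makes a CAS on a pointer slot read naturally. This Treiber-style stack push is only an illustration; Node and Push are hypothetical.

#include "lib/sysdep/cpu.h"	// assumed path

struct Node
{
	Node* next;
	int value;
};

// push a node onto a lock-free stack; T is deduced as Node*, so no
// intptr_t casts appear in user code
void Push(Node* volatile* head, Node* node)
{
	for(;;)
	{
		Node* const oldHead = *head;
		node->next = oldHead;
		if(cpu_CAS(head, oldHead, node))
			return;
	}
}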
inline void cpu_Pause()
{
#if MSC_VERSION && ARCH_X86_X64
	_mm_pause();
#elif GCC_VERSION && ARCH_X86_X64
	__asm__ __volatile__( "rep; nop" : : : "memory" );
#endif
}

#endif	// #ifndef INCLUDED_CPU
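Putting the pieces together, a minimal test-and-set spin lock shows the intended pairing of cpu_CAS with cpu_Pause in the wait loop. SpinLock is a hypothetical example type, and the unlock path is deliberately simplistic: a production lock would also need a release barrier.

#include "lib/sysdep/cpu.h"	// assumed path

class SpinLock
{
	volatile intptr_t m_locked = 0;	// 0 = free, 1 = held

public:
	void Lock()
	{
		// try to flip 0 -> 1; while another thread holds the lock,
		// cpu_Pause() hints to the CPU that this is a spin-wait loop
		while(!cpu_CAS(&m_locked, 0, 1))
			cpu_Pause();
	}

	void Unlock()
	{
		m_locked = 0;	// simplistic; no release barrier (see above)
	}
};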
const char* cpu_IdentifierString()

intptr_t cpu_AtomicAdd(volatile intptr_t* location, intptr_t increment)
    add a signed value to a variable without the possibility of interference from other threads/CPUs...

bool cpu_CAS(volatile intptr_t* location, intptr_t expected, intptr_t newValue)
    atomic "compare and swap".

bool cpu_CAS64(volatile i64* location, i64 expected, i64 newValue)

void cpu_Pause()
    pause in spin-wait loops, as a performance optimisation.

i64 Status
    Error handling system.

#define T(string_literal)

const Status CPU_UNKNOWN_OPCODE
const Status CPU_FEATURE_MISSING
const Status CPU_UNKNOWN_VENDOR
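Where intptr_t is only 32 bits wide, cpu_CAS64 still permits 64-bit atomics. A sketch of a CAS-loop 64-bit add follows, again assuming cpu_CAS64 returns true on a successful swap; AtomicAdd64 is a hypothetical helper built on it.

#include "lib/sysdep/cpu.h"	// assumed path; i64 is the library's 64-bit integer type

i64 AtomicAdd64(volatile i64* location, i64 increment)
{
	for(;;)
	{
		const i64 old = *location;
		if(cpu_CAS64(location, old, old + increment))
			return old;	// previous value, XADD-style
	}
}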