#ifndef BLOCXX_ATOMIC_OPS_HPP_
#define BLOCXX_ATOMIC_OPS_HPP_
#include "blocxx/BLOCXX_config.h"
#if defined(BLOCXX_AIX)
#include <sys/atomic_op.h>
#elif defined(__HP_aCC) && defined(BLOCXX_ARCH_IA64)
#include <machine/sys/inline.h>
#endif
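
// Each branch below supplies the same interface for one platform:
// Atomic_t (a wrapped counter) plus AtomicInc(), AtomicDec(),
// AtomicDecAndTest() and AtomicGet().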
#if (defined(BLOCXX_ARCH_X86) || defined(__i386__) || defined(BLOCXX_ARCH_X86_64) || defined(__x86_64__)) && defined(__GNUC__)
// x86/x86-64 with GCC: lock-prefixed inline assembly.
struct BLOCXX_COMMON_API Atomic_t
{
	Atomic_t() : val(0) {}
	Atomic_t(int i) : val(i) {}
	volatile int val;
};
// Decrement v.val and return true iff the new value is zero
// (sete copies the zero flag into c).
inline bool AtomicDecAndTest(Atomic_t &v)
{
	unsigned char c;
	__asm__ __volatile__(
		"lock ; " "decl %0; sete %1"
		: "=m" (v.val), "=qm" (c)
		: "m" (v.val)
		: "memory");
	return c != 0;
}
#elif defined(__HP_aCC) && defined(BLOCXX_ARCH_IA64)
// HP aC++ on Itanium: _Asm_fetchadd atomically adds to v.val and
// returns the value it held *before* the add.
inline void AtomicInc(Atomic_t &v)
{
	_Asm_fetchadd(_FASZ_W, _SEM_ACQ, &v.val, 1, _LDHINT_NONE);
}
inline bool AtomicDecAndTest(Atomic_t &v)
{
	int c = int(_Asm_fetchadd(_FASZ_W, _SEM_ACQ, &v.val, int(-1), _LDHINT_NONE));
	return c == 1; // old value 1 means the counter is now zero
}
inline void AtomicDec(Atomic_t &v)
{
	_Asm_fetchadd(_FASZ_W, _SEM_ACQ, &v.val, -1, _LDHINT_NONE);
}
#elif defined(BLOCXX_AIX)
// AIX: fetch_and_add() atomically adds to the word and returns the
// previous value; adding 0 yields an atomic read.
inline void AtomicInc(Atomic_t &v)
{
	::fetch_and_add(const_cast<atomic_p>(&v.val), 1);
}
inline bool AtomicDecAndTest(Atomic_t &v)
{
	int c = ::fetch_and_add(const_cast<atomic_p>(&v.val), -1);
	return c == 1; // old value 1 means the counter is now zero
}
inline int AtomicGet(Atomic_t const &v)
{
	int c = ::fetch_and_add(const_cast<atomic_p>(&v.val), 0);
	return c;
}
inline void AtomicDec(Atomic_t &v)
{
	::fetch_and_add(const_cast<atomic_p>(&v.val), -1);
}
#elif (defined(BLOCXX_ARCH_PPC) || defined(__ppc__)) && defined(__GNUC__)
// PowerPC with GCC: the standard lwarx/stwcx. load-reserve/
// store-conditional loop, retried until the store succeeds.
inline void AtomicInc(Atomic_t &v)
{
	int t;
	__asm__ __volatile__(
		"1:	lwarx	%0,0,%2\n"
		"	addic	%0,%0,1\n"
		"	stwcx.	%0,0,%2\n"
		"	bne-	1b"
		: "=&r" (t), "=m" (v.val)
		: "r" (&v.val), "m" (v.val)
		: "cc");
}
inline void AtomicDec(Atomic_t &v)
{
	int t;
	__asm__ __volatile__(
		"1:	lwarx	%0,0,%2\n"
		"	addic	%0,%0,-1\n"
		"	stwcx.	%0,0,%2\n"
		"	bne-	1b"
		: "=&r" (t), "=m" (v.val)
		: "r" (&v.val), "m" (v.val)
		: "cc");
}
inline bool AtomicDecAndTest(Atomic_t &v)
{
	int c;
	__asm__ __volatile__(
		"1:	lwarx	%0,0,%2\n"
		"	addic	%0,%0,-1\n"
		"	stwcx.	%0,0,%2\n"
		"	bne-	1b\n"
		"	isync"
		: "=&r" (c), "=m" (v.val)
		: "r" (&v.val), "m" (v.val)
		: "cc", "memory");
	return c == 0;
}
#elif defined(BLOCXX_WIN32)
#define WIN32_LEAN_AND_MEAN
#include <windows.h>
// Win32: the Interlocked* API works on a volatile LONG.
struct BLOCXX_COMMON_API Atomic_t
{
	Atomic_t() : val(0) {}
	Atomic_t(int i) : val(i) {}
	volatile long val;
};
inline void AtomicInc(Atomic_t &v)
{
	InterlockedIncrement(&v.val);
}
inline bool AtomicDecAndTest(Atomic_t &v)
{
	return InterlockedDecrement(&v.val) == 0; // returns the new value
}
inline void AtomicDec(Atomic_t &v)
{
	InterlockedDecrement(&v.val);
}
#elif defined(BLOCXX_HAVE_PTHREAD_SPIN_LOCK)
#include <pthread.h>
// Signals the implementation file to build the spinlock-based versions.
#define BLOCXX_USE_PTHREAD_SPIN_LOCK_ATOMIC_OPS
struct BLOCXX_COMMON_API Atomic_t
{
	Atomic_t(); // defined out of line: must initialize the spinlock
	Atomic_t(int i);
	int val;
	pthread_spinlock_t spinlock;
};
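
// A minimal sketch of the matching out-of-line definitions (they belong in
// the library's implementation file, not in this header), assuming each
// operation simply wraps plain int arithmetic in the member spinlock:
//
//   void AtomicInc(Atomic_t &v)
//   {
//       pthread_spin_lock(&v.spinlock);
//       ++v.val;
//       pthread_spin_unlock(&v.spinlock);
//   }
//   bool AtomicDecAndTest(Atomic_t &v)
//   {
//       pthread_spin_lock(&v.spinlock);
//       bool b = (--v.val == 0);
//       pthread_spin_unlock(&v.spinlock);
//       return b;
//   }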

#else
// Portable fallback: the four operations are defined out of line in the
// library rather than inline here.
#define BLOCXX_USE_BLOCXX_DEFAULT_ATOMIC_OPS
struct BLOCXX_COMMON_API Atomic_t
{
	Atomic_t() : val(0) {}
	Atomic_t(int i) : val(i) {}
	volatile int val;
};
bool AtomicDecAndTest(Atomic_t &v);
void AtomicInc(Atomic_t &v);
void AtomicDec(Atomic_t &v);
int AtomicGet(Atomic_t const &v);

#endif
#endif // BLOCXX_ATOMIC_OPS_HPP_
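
// Usage sketch: these primitives typically back intrusive reference
// counting. The class below is hypothetical (not part of this header),
// shown only to illustrate the intended call pattern:
//
//   class Counted
//   {
//   public:
//       Counted() : m_refs(1) {}
//       void addRef() { AtomicInc(m_refs); }
//       void release() { if (AtomicDecAndTest(m_refs)) delete this; }
//       int refCount() const { return AtomicGet(m_refs); }
//   private:
//       Atomic_t m_refs;
//   };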