#ifdef HAVE_BUILTIN_ATOMIC

typedef volatile unsigned long atomic_t;
#define atomic_cmp_set(a,b,c) __sync_bool_compare_and_swap(a,b,c)
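/*
 * Note: in every branch below, atomic_cmp_set(a, b, c) atomically replaces *a
 * with c only if *a still equals b, and returns non-zero when the swap happened.
 * The spinlocks at the end of this header take a lock by swapping 0 -> 1.
 * A minimal sketch, assuming a hypothetical lock word `lk`:
 *
 *     static atomic_t lk = 0;
 *     if (atomic_cmp_set(&lk, 0, 1)) {
 *         ... lock acquired ...
 *     }
 */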
#elif ( __i386__ || __i386 )

typedef int32_t atomic_int_t;
typedef uint32_t atomic_uint_t;
typedef volatile atomic_uint_t atomic_t;
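/*
 * The two helpers below are thin wrappers around `lock xadd` and `lock cmpxchg`.
 * atomic_fetch_add() returns the value *value held before the addition;
 * atomic_cmp_set() stores `set` only if *lock still equals `old` and returns
 * non-zero when it did. A usage sketch, assuming a hypothetical shared
 * counter `hits`:
 *
 *     static atomic_t hits = 0;
 *     atomic_int_t before = atomic_fetch_add(&hits, 1);
 */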
static inline atomic_int_t atomic_fetch_add(atomic_t *value, atomic_int_t add)
{
    __asm__ volatile ( "lock;" "xaddl %0, %1;" :
        "+r" (add) : "m" (*value) : "memory");

    return add;
}
static inline atomic_uint_t atomic_cmp_set(atomic_t *lock, atomic_uint_t old, atomic_uint_t set)
{
    unsigned char res;

    __asm__ volatile ( "lock;" "cmpxchgl %3, %1;" "sete %0;" :
        "=a" (res) : "m" (*lock), "a" (old), "r" (set) : "memory");

    return res;
}
#elif ( __amd64__ || __amd64 || __x86_64__ )

typedef int64_t atomic_int_t;
typedef uint64_t atomic_uint_t;
typedef volatile atomic_uint_t atomic_t;
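/*
 * The amd64 versions below follow the same pattern as the i386 ones; only the
 * operand size changes (xaddq/cmpxchgq on 64-bit values instead of
 * xaddl/cmpxchgl on 32-bit ones).
 */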
static inline atomic_int_t atomic_fetch_add(atomic_t *value, atomic_int_t add)
{
    __asm__ volatile ( "lock;" "xaddq %0, %1;" :
        "+r" (add) : "m" (*value) : "memory");

    return add;
}
static inline atomic_uint_t atomic_cmp_set(atomic_t *lock, atomic_uint_t old, atomic_uint_t set)
{
    unsigned char res;

    __asm__ volatile ( "lock;" "cmpxchgq %3, %1;" "sete %0;" :
        "=a" (res) : "m" (*lock), "a" (old), "r" (set) : "memory");

    return res;
}
#if (__GNUC__) && (__GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 2))

#elif ( __arm__ || __arm )

#if (__arch64__ || __arch64)
typedef int64_t atomic_int_t;
typedef uint64_t atomic_uint_t;
#else
typedef int32_t atomic_int_t;
typedef uint32_t atomic_uint_t;
#endif

#define atomic_cmp_set(a,b,c) __sync_bool_compare_and_swap(a,b,c)

#endif /* defined (__GNUC__) && ... */
#elif ( __sparc__ || __sparc )

#if (__sparcv9 || __sparcv9__)

#if (__arch64__ || __arch64)
typedef uint64_t atomic_uint_t;
typedef volatile atomic_uint_t atomic_t;
static inline atomic_uint_t atomic_cas_64(atomic_t *lock, atomic_uint_t old, atomic_uint_t new)
{
    __asm__ __volatile__("casx [%2], %3, %0 " :
        "=&r"(new) : "0"(new), "r"(lock), "r"(old) : "memory");

    return new;
}
static inline atomic_uint_t atomic_cmp_set(atomic_t *lock, atomic_uint_t old, atomic_uint_t set)
{
    return (atomic_cas_64(lock, old, set) == old);
}
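/*
 * Note: `casx` always writes the value it observed at [lock] back into the
 * register holding `new`, so atomic_cas_64() returns the old memory contents;
 * comparing that result with `old` tells atomic_cmp_set() whether the swap
 * actually took place. The 32-bit variant below works the same way with `cas`.
 */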
#else
typedef uint32_t atomic_uint_t;
typedef volatile atomic_uint_t atomic_t;
static inline atomic_uint_t atomic_cas_32(atomic_t *lock, atomic_uint_t old, atomic_uint_t new)
{
    __asm__ __volatile__("cas [%2], %3, %0 " :
        "=&r"(new) : "0"(new), "r"(lock), "r"(old) : "memory");

    return new;
}
static inline atomic_uint_t atomic_cmp_set(atomic_t *lock, atomic_uint_t old, atomic_uint_t set)
{
    return (atomic_cas_32(lock, old, set) == old);
}
#endif /* (__arch64__ || __arch64) */
#else /* #if (__sparcv9 || __sparcv9__) */
#error Sparc v8 and predecessors are not and will not be supported (see bug report 53310)
#endif /* #if (__sparcv9 || __sparcv9__) */
#else

#error Unsupported processor. Please open a bug report (https:

#endif
/* spin until the lock is taken; with try_once, make a single attempt and
 * return 1 on success or 0 if the lock was busy */
static inline int fpm_spinlock(atomic_t *lock, int try_once)
{
    if (try_once) {
        return atomic_cmp_set(lock, 0, 1) ? 1 : 0;
    }

    for (;;) {
        if (atomic_cmp_set(lock, 0, 1)) {
            break;
        }
        sched_yield();
    }

    return 1;
}
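/*
 * Usage sketch, assuming a hypothetical atomic_t `shared_lock` that every
 * thread or process taking the lock can see (e.g. in a shared mapping):
 *
 *     fpm_spinlock(&shared_lock, 0);          blocking acquire
 *     ... critical section ...
 *     fpm_unlock(shared_lock);
 *
 *     if (fpm_spinlock(&shared_lock, 1)) {    single attempt, 0 means busy
 *         ... critical section ...
 *         fpm_unlock(shared_lock);
 *     }
 */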
/* like fpm_spinlock(), but give up and return 0 after max_retries failed attempts */
static inline int fpm_spinlock_with_max_retries(atomic_t *lock, unsigned int max_retries)
{
    unsigned int retries = 0;

    for (;;) {
        if (atomic_cmp_set(lock, 0, 1)) {
            break;
        }
        sched_yield();

        if (++retries > max_retries) {
            return 0;
        }
    }

    return 1;
}
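/*
 * Usage sketch, again with a hypothetical shared lock word: take the lock with
 * a bounded number of attempts instead of spinning forever.
 *
 *     if (!fpm_spinlock_with_max_retries(&shared_lock, 100)) {
 *         ... lock still busy after 100 retries, handle the failure ...
 *     }
 */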
#define fpm_unlock(lock) lock = 0