|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
#include <config.h> |
|
|
|
|
|
|
|
|
#include "glthread/spin.h" |
|
|
|
|
|
#include <errno.h> |
|
|
#if defined _AIX |
|
|
# include <sys/atomic_op.h> |
|
|
#endif |
|
|
#if 0x590 <= __SUNPRO_C && __STDC__ |
|
|
# define asm __asm |
|
|
#endif |
|
|
|
|
|
#if defined _WIN32 && !defined __CYGWIN__ |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
#else |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# if (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 7) \ |
|
|
|| __clang_major__ > 3 || (__clang_major__ == 3 && __clang_minor__ >= 1)) \ |
|
|
&& !defined __ibmxl__ |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# if 1 |
|
|
|
|
|
|
|
|
void |
|
|
glthread_spinlock_init (gl_spinlock_t *lock) |
|
|
{ |
|
|
__atomic_store_n (lock, 0, __ATOMIC_SEQ_CST); |
|
|
} |
|
|
|
|
|
void |
|
|
glthread_spinlock_lock (gl_spinlock_t *lock) |
|
|
{ |
|
|
|
|
|
gl_spinlock_t zero; |
|
|
while (!(zero = 0, |
|
|
__atomic_compare_exchange_n (lock, &zero, 1, false, |
|
|
__ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST))) |
|
|
; |
|
|
} |
|
|
|
|
|
int |
|
|
glthread_spinlock_unlock (gl_spinlock_t *lock) |
|
|
{ |
|
|
|
|
|
gl_spinlock_t one = 1; |
|
|
if (!__atomic_compare_exchange_n (lock, &one, 0, false, |
|
|
__ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST)) |
|
|
return EINVAL; |
|
|
return 0; |
|
|
} |
|
|
|
|
|
# else |
|
|
|
|
|
|
|
|
|
|
|
/* Initializes LOCK to the released (clear) state, with a sequentially
   consistent barrier so the store is visible to all threads.  */
void
glthread_spinlock_init (gl_spinlock_t *lock)
{
  __atomic_clear (lock, __ATOMIC_SEQ_CST);
}
|
|
|
|
|
void |
|
|
glthread_spinlock_lock (gl_spinlock_t *lock) |
|
|
{ |
|
|
while (__atomic_test_and_set (lock, __ATOMIC_SEQ_CST)) |
|
|
; |
|
|
} |
|
|
|
|
|
/* Releases LOCK unconditionally.  The test-and-set/clear primitives
   cannot detect a not-held lock, so this variant always returns 0.  */
int
glthread_spinlock_unlock (gl_spinlock_t *lock)
{
  __atomic_clear (lock, __ATOMIC_SEQ_CST);
  return 0;
}
|
|
|
|
|
# endif |
|
|
|
|
|
# elif (((__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 1)) \ |
|
|
|| __clang_major__ >= 3) \ |
|
|
&& HAVE_ATOMIC_COMPARE_AND_SWAP_GCC41) |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
void |
|
|
glthread_spinlock_init (gl_spinlock_t *lock) |
|
|
{ |
|
|
volatile unsigned int *vp = lock; |
|
|
*vp = 0; |
|
|
__sync_synchronize (); |
|
|
} |
|
|
|
|
|
void |
|
|
glthread_spinlock_lock (gl_spinlock_t *lock) |
|
|
{ |
|
|
|
|
|
while (__sync_val_compare_and_swap (lock, 0, 1) != 0) |
|
|
; |
|
|
} |
|
|
|
|
|
int |
|
|
glthread_spinlock_unlock (gl_spinlock_t *lock) |
|
|
{ |
|
|
|
|
|
if (__sync_val_compare_and_swap (lock, 1, 0) != 1) |
|
|
return EINVAL; |
|
|
return 0; |
|
|
} |
|
|
|
|
|
# elif defined _AIX |
|
|
|
|
|
|
|
|
void |
|
|
glthread_spinlock_init (gl_spinlock_t *lock) |
|
|
{ |
|
|
atomic_p vp = (int *) lock; |
|
|
_clear_lock (vp, 0); |
|
|
} |
|
|
|
|
|
void |
|
|
glthread_spinlock_lock (gl_spinlock_t *lock) |
|
|
{ |
|
|
atomic_p vp = (int *) lock; |
|
|
while (_check_lock (vp, 0, 1)) |
|
|
; |
|
|
} |
|
|
|
|
|
int |
|
|
glthread_spinlock_unlock (gl_spinlock_t *lock) |
|
|
{ |
|
|
atomic_p vp = (int *) lock; |
|
|
if (_check_lock (vp, 1, 0)) |
|
|
return EINVAL; |
|
|
return 0; |
|
|
} |
|
|
|
|
|
# elif ((defined __GNUC__ || defined __clang__ || defined __SUNPRO_C) && (defined __sparc || defined __i386 || defined __x86_64__)) || (defined __TINYC__ && (defined __i386 || defined __x86_64__)) |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
/* Emits a full memory barrier for the architectures this branch
   supports (i386/x86_64 and SPARC).  The outer #if selects between
   compilers with extended asm support ('asm volatile') and older
   compilers that only accept plain 'asm'.  */
static void
memory_barrier (void)
{
# if defined __GNUC__ || defined __clang__ || __SUNPRO_C >= 0x590 || defined __TINYC__
#  if defined __i386 || defined __x86_64__
#   if defined __TINYC__ && defined __i386
  /* tcc on i386: use a locked no-op store to the stack instead of
     'mfence' — NOTE(review): presumably because tcc's assembler does
     not accept 'mfence'; confirm against tcc documentation.  */
  asm volatile ("lock orl $0,(%esp)");
#   else
  asm volatile ("mfence");
#   endif
#  endif
#  if defined __sparc
  /* membar with mask 2 — per SPARC V9, presumably #StoreLoad; the
     strongest reordering a TSO SPARC permits.  */
  asm volatile ("membar 2");
#  endif
# else
  /* Compilers without 'asm volatile': fall back to plain 'asm'.  */
#  if defined __i386 || defined __x86_64__
  asm ("mfence");
#  endif
#  if defined __sparc
  asm ("membar 2");
#  endif
# endif
}
|
|
|
|
|
|
|
|
|
|
|
/* Atomic compare-and-swap: if *VP equals CMP, stores NEWVAL into *VP.
   Returns the previous value of *VP; the caller infers success from
   (result == CMP).  */
static unsigned int
atomic_compare_and_swap (volatile unsigned int *vp, unsigned int cmp,
                         unsigned int newval)
{
# if defined __GNUC__ || defined __clang__ || __SUNPRO_C >= 0x590 || defined __TINYC__
  unsigned int oldval;
#  if defined __i386 || defined __x86_64__
  /* lock cmpxchg compares %eax (= CMP) with (%1); on equality it stores
     NEWVAL there, otherwise it loads the current value into %eax.
     Either way the old value ends up in %eax, bound to OLDVAL.  */
  asm volatile (" lock\n cmpxchgl %3,(%1)"
                : "=a" (oldval) : "r" (vp), "a" (cmp), "r" (newval) : "memory");
#  endif
#  if defined __sparc
  /* cas leaves the old value of [%1] in the NEWVAL register (%3),
     which is then copied into OLDVAL.  */
  asm volatile (" cas [%1],%2,%3\n"
                " mov %3,%0"
                : "=r" (oldval) : "r" (vp), "r" (cmp), "r" (newval) : "memory");
#  endif
  return oldval;
# else
  /* Old compilers without extended asm: the asm below leaves the old
     value in the ABI return register (%eax / %i0), so this branch
     deliberately has no C-level return statement — do not "fix" the
     missing return.  */
#  if defined __x86_64__
  asm (" movl %esi,%eax\n"
       " lock\n cmpxchgl %edx,(%rdi)");
#  elif defined __i386
  asm (" movl 16(%ebp),%ecx\n"
       " movl 12(%ebp),%eax\n"
       " movl 8(%ebp),%edx\n"
       " lock\n cmpxchgl %ecx,(%edx)");
#  endif
#  if defined __sparc
  asm (" cas [%i0],%i1,%i2\n"
       " mov %i2,%i0");
#  endif
# endif
}
|
|
|
|
|
void |
|
|
glthread_spinlock_init (gl_spinlock_t *lock) |
|
|
{ |
|
|
volatile unsigned int *vp = lock; |
|
|
*vp = 0; |
|
|
memory_barrier (); |
|
|
} |
|
|
|
|
|
void |
|
|
glthread_spinlock_lock (gl_spinlock_t *lock) |
|
|
{ |
|
|
volatile unsigned int *vp = lock; |
|
|
while (atomic_compare_and_swap (vp, 0, 1) != 0) |
|
|
; |
|
|
} |
|
|
|
|
|
int |
|
|
glthread_spinlock_unlock (gl_spinlock_t *lock) |
|
|
{ |
|
|
volatile unsigned int *vp = lock; |
|
|
if (atomic_compare_and_swap (vp, 1, 0) != 1) |
|
|
return EINVAL; |
|
|
return 0; |
|
|
} |
|
|
|
|
|
# else |
|
|
|
|
|
|
|
|
void |
|
|
glthread_spinlock_init (gl_spinlock_t *lock) |
|
|
{ |
|
|
volatile unsigned int *vp = lock; |
|
|
*vp = 0; |
|
|
} |
|
|
|
|
|
/* Acquires LOCK — fallback for platforms with no atomic primitives.
   NOTE(review): the wait loop and the store below are two separate
   operations, not one atomic test-and-set, so two threads can both
   observe *vp == 0 and both "acquire" the lock.  This is a best-effort
   placeholder, not a correct spin lock.  */
void
glthread_spinlock_lock (gl_spinlock_t *lock)
{
  volatile unsigned int *vp = lock;
  while (*vp)
    ;
  *vp = 1;
}
|
|
|
|
|
int |
|
|
glthread_spinlock_unlock (gl_spinlock_t *lock) |
|
|
{ |
|
|
volatile unsigned int *vp = lock; |
|
|
*vp = 0; |
|
|
return 0; |
|
|
} |
|
|
|
|
|
# endif |
|
|
|
|
|
/* Releases resources associated with LOCK.  Every implementation in
   this branch stores the lock state directly in *LOCK, so there is
   nothing to deallocate; the function exists to complete the API.  */
void
glthread_spinlock_destroy (gl_spinlock_t *lock)
{
}
|
|
|
|
|
#endif |
|
|
|