65 #if defined(i686_ARCH) || defined(x86_64_ARCH)
66 static __inline__
int tracelib_test_and_set(MALP_Spinlock * atomic)
69 __asm__ __volatile__(
"lock; xchgl %0, %1":
"=r"(ret),
"=m"(*atomic):
"0"(1),
"m"(*atomic):
"memory");
/**
 * Spin-wait hint for busy loops.
 *
 * "rep;nop" encodes the x86 PAUSE instruction, which tells the CPU the
 * caller is spinning (saves power, avoids a pipeline flush on exit from
 * the spin loop).  The "memory" clobber forces the lock word to be
 * re-read on every iteration of the enclosing loop.
 */
static __inline__ void cpu_relax(void)
{
    __asm__ __volatile__(
        "rep;nop"
        :
        :
        : "memory");
}
76 #elif defined(ia64_ARCH)
77 static __inline__
int tracelib_test_and_set(MALP_Spinlock * atomic)
80 __asm__ __volatile__(
"xchg4 %0=%1, %2":
"=r"(ret),
"=m"(*atomic):
"0"(1),
"m"(*atomic):
"memory");
/**
 * Spin-wait hint for busy loops (Itanium variant).
 *
 * "hint @pause" is the ia64 equivalent of the x86 PAUSE instruction;
 * the "memory" clobber forces the lock word to be re-read on every
 * iteration of the enclosing spin loop.
 */
static __inline__ void cpu_relax(void)
{
    __asm__ __volatile__(
        "hint @pause"
        :
        :
        : "memory");
}
87 #elif defined(sparc_ARCH)
88 static __inline__
int tracelib_test_and_set(MALP_Spinlock * atomic)
91 __asm__ __volatile__(
"ldstub [%0], %1":
"=r"(spinlock),
"=r"(ret):
"0"(spinlock),
"1"(ret):
"memory");
92 return (
unsigned) ret;
94 static __inline__
void cpu_relax(
void)
int MALP_Spinlock_unlock(MALP_Spinlock *atomic)
    Unlocks the given MALP_Spinlock.

int MALP_Spinlock_lock(MALP_Spinlock *atomic)
    Locks the given MALP_Spinlock.

volatile unsigned int MALP_Spinlock
    The type of spinlocks in MALP.

int MALP_Spinlock_trylock(MALP_Spinlock *mutex)
    Tries to lock the given MALP_Spinlock.