/* Lock / atomic-operation primitives for x86 (GNU inline asm).
 * NOTE(review): the guard name uses leading double underscores, which are
 * identifiers reserved for the implementation in C -- consider renaming
 * (e.g. MLP_LOCK_H) together with the matching #endif comment. */
1 #ifndef ____MLP_LOCK_H__
2 #define ____MLP_LOCK_H__
/* View an arbitrary pointer as volatile INTPTR* for use as an asm memory
 * operand. INTPTR is presumably defined in an elided/earlier header -- confirm. */
11 #define __xg(x) ((volatile INTPTR *)(x))
/* Compiler-only fence: forbids compiler reordering, emits no instruction. */
13 #define CFENCE asm volatile("":::"memory");
/* Full hardware store/load fence (x86 mfence) plus compiler barrier. */
14 #define MFENCE asm volatile("mfence":::"memory");
/* Fragment of a LOCK_PREFIX-style macro (its opening #define line is not
 * visible here): records each lock-prefixed instruction's address in the
 * .smp_locks section, the Linux-kernel SMP-alternatives patching idiom. */
17 ".section .smp_locks,\"a\"\n" \
19 " .long 661f\n" /* address */\
/* Atomically decrement *v ("lock decl"). No return value; callers that need
 * the post-decrement result should use atomic_sub_and_test instead.
 * (Operand list / closing of the asm statement is elided in this listing.) */
24 static inline void atomic_dec(volatile int *v) {
25 __asm__ __volatile__ (LOCK_PREFIX "decl %0"
/* Atomically increment *v ("lock incl"). Mirrors atomic_dec.
 * (Operand list / closing of the asm statement is elided in this listing.) */
29 static inline void atomic_inc(volatile int *v) {
30 __asm__ __volatile__ (LOCK_PREFIX "incl %0"
34 // this returns TRUE if the atomic subtraction results in
35 // a zero value--this way two threads cannot dec a value
36 // atomically, but then go ahead and both read zero,
37 // thinking they both are the last decrementer
/* Atomically *v -= i; "sete" latches 1 into c iff the result was zero, so
 * exactly one caller observes the zero crossing.
 * (Declaration of c and the return statement are elided in this listing --
 * presumably "unsigned char c;" and "return c;", per the =qm constraint.) */
38 static inline int atomic_sub_and_test(int i, volatile int *v) {
41 __asm__ __volatile__ (LOCK_PREFIX "subl %2,%0; sete %1"
42 : "+m" (*v), "=qm" (c)
43 : "ir" (i) : "memory");
/* Atomically *v += i ("lock addl"). No return value.
 * (Operand list / closing of the asm statement is elided in this listing.) */
48 static inline void atomic_add(int i, volatile int *v) {
49 __asm__ __volatile__ (LOCK_PREFIX "addl %1,%0"
/* Atomically swap *ptr and val (32-bit); presumably returns the old *ptr.
 * xchg with a memory operand is implicitly locked on x86 -- no LOCK_PREFIX
 * needed. (Operands / return are elided in this listing.) */
54 static inline int LOCKXCHG32(volatile int* ptr, int val){
56 //note: xchgl always implies lock
57 __asm__ __volatile__("xchgl %0,%1"
66 // LOCKXCH atomically does the following:
67 // INTPTR retval=*ptr;
/* 64-bit pointer-width exchange ("xchgq"); same implicit-lock property as
 * LOCKXCHG32. (Operands / return elided in this listing.) */
71 static inline INTPTR LOCKXCHG(volatile INTPTR * ptr, INTPTR val){
73 //note: xchgl always implies lock
74 __asm__ __volatile__("xchgq %0,%1"
/* 32-bit fallback: on non-64-bit builds LOCKXCHG aliases LOCKXCHG32.
 * NOTE(review): the #ifdef/#else selecting between this and the xchgq
 * version above is not visible in this listing -- confirm it exists. */
82 #define LOCKXCHG LOCKXCHG32
/* Non-blocking lock attempt: exchanges retval with *lock and presumably
 * returns the previous lock word so the caller can tell if it won.
 * (Initialization of retval and the return are elided in this listing --
 * confirm the success/failure convention against callers.) */
86 static inline int write_trylock(volatile int *lock) {
88 __asm__ __volatile__("xchgl %0,%1"
90 : "m"(*lock), "0"(retval)
/* 64-bit compare-and-swap: if *ptr == old, store new; "0"(old) places the
 * comparand in rax as cmpxchgq requires; presumably returns the prior *ptr.
 * NOTE(review): "unsigned INTPTR" is only valid if INTPTR is a macro for a
 * basic integer type, not a typedef -- confirm INTPTR's definition. */
97 static inline INTPTR CAS(volatile void *ptr, unsigned INTPTR old, unsigned INTPTR new){
99 __asm__ __volatile__("lock; cmpxchgq %1,%2"
101 : "r"(new), "m"(*__xg(ptr)), "0"(old)
/* 32-bit compare-and-swap ("lock cmpxchgl"); %k1 forces the 32-bit form of
 * the register holding new. Presumably returns the prior *ptr (elided). */
106 static inline long CAS32(volatile void *ptr, unsigned long old, unsigned long new){
108 __asm__ __volatile__("lock; cmpxchgl %k1,%2"
110 : "r"(new), "m"(*__xg(ptr)), "0"(old)
/* 32-bit-build variant of CAS (same body as CAS32).
 * NOTE(review): this redefines the name CAS already defined above -- the
 * #else of a pointer-width conditional is presumably elided from this
 * listing; confirm the two definitions cannot both be compiled. */
115 static inline long CAS(volatile void *ptr, unsigned long old, unsigned long new){
117 __asm__ __volatile__("lock; cmpxchgl %k1,%2"
119 : "r"(new), "m"(*__xg(ptr)), "0"(old)
/* Barrier wrappers (bodies elided in this listing) -- presumably BARRIER
 * issues the compiler-only CFENCE and MBARRIER the hardware MFENCE; the int
 * return is suspicious for a fence (confirm they return a meaningful value). */
127 static inline int BARRIER(){
132 static inline int MBARRIER(){
138 #endif // ____MLP_LOCK_H__