// Unlocked value for the simple swap (xchg) lock: 1 == free.
#define SWAP_LOCK_BIAS 1
// Compiler-only fence: blocks compiler reordering across this point;
// emits no CPU instruction (not a hardware memory barrier).
#define CFENCE asm volatile("":::"memory");
// Read-write lock bias (same scheme as the classic Linux x86 rwlock):
// the lock word starts at RW_LOCK_BIAS; a writer subtracts the whole
// bias (see write_trylock below), readers apparently take/return single
// units (see read_unlock's incl) — so the word hits 0 only when one
// writer and no readers hold the lock.
#define RW_LOCK_BIAS 0x01000000
10 ".section .smp_locks,\"a\"\n" \
12 " .long 661f\n" /* address */\
// Forward declaration of the initializer defined below.
// NOTE(review): declared without `static`, but the definition below is
// `static inline` — a linkage conflict in C; make this declaration
// `static inline` too (or drop it) — confirm intent.
void initdsmlocks(volatile unsigned int *addr);
// NOTE(review): dead commented-out prototypes; candidates for deletion.
//int write_trylock(volatile unsigned int *lock);
//void write_unlock(volatile unsigned int *lock);
// Reset a lock word to its unlocked state: full RW_LOCK_BIAS credit,
// i.e. no readers and no writer. (Closing brace lies outside this chunk.)
static inline void initdsmlocks(volatile unsigned int *addr) {
  (*addr) = RW_LOCK_BIAS;
// Try-lock via atomic xchg on the lock word.
// NOTE(review): only the instruction and this operand line are visible;
// the other operand list, the `retval` declaration, and the return
// statement fall outside this chunk — cannot verify the full contract.
static inline int write_trylock(volatile unsigned int *lock) {
  __asm__ __volatile__("xchgl %0,%1"
                       : "m"(*lock), "0"(retval)
// Release the swap-based lock by storing 1 (== SWAP_LOCK_BIAS, unlocked).
// Plain movl without LOCK: an aligned 32-bit store is a single atomic
// store on x86, and the "memory" clobber stops the compiler from sinking
// earlier accesses past the release. (Closing brace outside this chunk.)
static inline void write_unlock(volatile unsigned int *lock) {
  __asm__ __volatile__("movl $1, %0" : "+m" (*lock)::"memory");
// Atomically add i to *v (lock-prefixed addl).
// NOTE(review): operand lists / closing brace are outside this chunk.
static inline void atomic_add(int i, unsigned int *v) {
  __asm__ __volatile__ (LOCK_PREFIX "addl %1,%0"
// Drop a read lock: atomically give back the one unit a reader holds.
// (Closing brace lies outside this chunk.)
static inline void read_unlock(volatile unsigned int *rw) {
  __asm__ __volatile__ (LOCK_PREFIX "incl %0" : "+m" (*rw) : : "memory");
// Drop a write lock: atomically restore the full RW_LOCK_BIAS credit.
// NOTE(review): this re-defines write_unlock (a movl-$1 variant appears
// earlier in the file); presumably an #if/#ifdef in the elided lines
// selects one variant — confirm, otherwise this is a redefinition error.
static inline void write_unlock(volatile unsigned int *rw) {
  __asm__ __volatile__ (LOCK_PREFIX "addl %1, %0"
                        : "+m" (*rw) : "i" (RW_LOCK_BIAS) : "memory");
// Atomically decrement *v (lock-prefixed decl).
// NOTE(review): operand lists / closing brace are outside this chunk.
static inline void atomic_dec(volatile unsigned int *v) {
  __asm__ __volatile__ (LOCK_PREFIX "decl %0"
// Atomically increment *v (lock-prefixed incl).
// NOTE(review): operand lists / closing brace are outside this chunk.
static inline void atomic_inc(volatile unsigned int *v) {
  __asm__ __volatile__ (LOCK_PREFIX "incl %0"
// Atomically subtract i from *v; `sete` captures into c whether the
// result is exactly zero (caller uses this to detect a fully-acquired
// write lock).
// NOTE(review): the declaration of c and the `return c;` are in elided
// lines outside this chunk.
static inline int atomic_sub_and_test(int i, unsigned int *v) {
  __asm__ __volatile__ (LOCK_PREFIX "subl %2,%0; sete %1"
                        : "+m" (*v), "=qm" (c)
                        : "ir" (i) : "memory");
74 #define atomic_read(v) (*v)
// Try to take a read lock.
// BUG(review): lock points to unsigned int, so atomic_read(lock) >= 0 is
// ALWAYS true — this test reports "can acquire" unconditionally. The
// comparison almost certainly intended a signed view of the lock word
// (negative == writer present under the RW_LOCK_BIAS scheme).
// NOTE(review): the actual acquire/decrement and failure path are in
// elided lines outside this chunk.
static inline int read_trylock(volatile unsigned int *lock) {
  if (atomic_read(lock) >= 0)
    return 1; // can acquire a new read lock
// Try to take the write lock: subtract the whole RW_LOCK_BIAS; success
// iff the word reaches exactly zero (no readers, no other writer). On
// failure, add the bias back to undo the attempt.
// BUG(review): atomic_sub_and_test and atomic_add take `unsigned int *`,
// but both calls here pass `*lock` (the VALUE). They should pass `lock`;
// as written this is an integer-to-pointer constraint violation.
// NOTE(review): also a second definition of write_trylock (an xchg
// variant appears earlier); presumably #if-selected in elided lines —
// confirm. Closing braces lie outside the visible chunk.
static inline int write_trylock(volatile unsigned int *lock) {
  if (atomic_sub_and_test(RW_LOCK_BIAS, *lock)) {
    return 1; // get a write lock
  atomic_add(RW_LOCK_BIAS, *lock);
  return 0; // failed to acquire a write lock