".previous\n" \
"661:\n\tlock; "
-static inline initdsmlocks(volatile unsigned int *addr) {
+static inline void initdsmlocks(volatile int *addr) {
(*addr) = SWAP_LOCK_BIAS;
}
//int write_trylock(volatile unsigned int *lock);
}
*/
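/* Simple swap (test-and-set) lock: the word holds 1 when free (write_unlock
 * stores 1 back, and initdsmlocks presumably sets SWAP_LOCK_BIAS to that same
 * unlocked value). write_trylock exchanges 0 into the word and returns the
 * previous contents, so a nonzero return means the lock was free and is now
 * held by the caller. */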
-static inline int write_trylock(volatile unsigned int *lock) {
+static inline int write_trylock(volatile int *lock) {
int retval=0;
__asm__ __volatile__("xchgl %0,%1"
: "=r"(retval)
return retval;
}
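/* Release the swap lock by storing the unlocked value back. */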
-static inline void write_unlock(volatile unsigned int *lock) {
+static inline void write_unlock(volatile int *lock) {
__asm__ __volatile__("movl $1, %0" : "+m" (*lock)::"memory");
}
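/* Atomically add i to *v with a lock-prefixed addl. */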
-static inline void atomic_add(int i, volatile unsigned int *v) {
+static inline void atomic_add(int i, volatile int *v) {
__asm__ __volatile__ (LOCK_PREFIX "addl %1,%0"
: "+m" (*v)
: "ir" (i));
}
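/* Reader-writer lock word: each reader decrements the word by 1 and a writer
 * takes the whole RW_LOCK_BIAS, so the word equals RW_LOCK_BIAS when fully
 * free. rwread_unlock drops one read lock by incrementing the word back. */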
-static inline void rwread_unlock(volatile unsigned int *rw) {
+static inline void rwread_unlock(volatile int *rw) {
__asm__ __volatile__ (LOCK_PREFIX "incl %0" : "+m" (*rw) : : "memory");
}
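/* Drop a write lock by adding RW_LOCK_BIAS back to the word. */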
-static inline void rwwrite_unlock(volatile unsigned int *rw) {
+static inline void rwwrite_unlock(volatile int *rw) {
__asm__ __volatile__ (LOCK_PREFIX "addl %1, %0"
: "+m" (*rw) : "i" (RW_LOCK_BIAS) : "memory");
}
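/* Release a converted (read-to-write) lock: add back the RW_LOCK_BIAS-1 that
 * the conversion subtracted, returning the word to the one-reader state. */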
-static inline void rwconvert_unlock(volatile unsigned int *rw) {
+static inline void rwconvert_unlock(volatile int *rw) {
__asm__ __volatile__ (LOCK_PREFIX "addl %1, %0"
: "+m" (*rw) : "i" (RW_LOCK_BIAS-1) : "memory");
}
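/* Atomically decrement *v. */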
-static inline void atomic_dec(volatile unsigned int *v) {
+static inline void atomic_dec(volatile int *v) {
__asm__ __volatile__ (LOCK_PREFIX "decl %0"
: "+m" (*v));
}
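/* Atomically increment *v. */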
-static inline void atomic_inc(volatile unsigned int *v) {
+static inline void atomic_inc(volatile int *v) {
__asm__ __volatile__ (LOCK_PREFIX "incl %0"
: "+m" (*v));
}
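/* Atomically subtract i from *v; returns nonzero iff the new value is zero
 * (sete captures ZF from the locked subl). */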
-static inline int atomic_sub_and_test(int i, volatile unsigned int *v) {
+static inline int atomic_sub_and_test(int i, volatile int *v) {
unsigned char c;
__asm__ __volatile__ (LOCK_PREFIX "subl %2,%0; sete %1"
return c;
}
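/* Try to take the write lock: compare-and-swap the word from RW_LOCK_BIAS
 * (fully free) to 0, so it succeeds only when no reader or writer holds it. */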
-static inline int rwwrite_trylock(volatile unsigned int *ptr) {
+static inline int rwwrite_trylock(volatile int *ptr) {
//static inline unsigned long cas(volatile unsigned int* ptr) {
- unsigned int prev;
+ int prev;
__asm__ __volatile__("lock;"
"cmpxchgl %1, %2;"
: "=a"(prev)
#define atomic_read(v) (*(v))
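/* Try to take a read lock: decrement the word; a still-nonnegative result
 * means no writer holds the lock. Note the decrement and the subsequent read
 * are two separate atomic operations, not one combined dec-and-test. */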
-static inline int rwread_trylock(volatile unsigned int *lock) {
+static inline int rwread_trylock(volatile int *lock) {
  atomic_dec(lock);
  if (likely(atomic_read(lock) >= 0))
    return 1; // can acquire a new read lock
  atomic_inc(lock); // a writer holds the lock: undo the decrement
  return 0; // failed to acquire a read lock
}
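/* Try to upgrade a held read lock to a write lock: subtracting RW_LOCK_BIAS-1
 * brings the word to zero only if the caller is the sole reader. Illustrative
 * call pattern (names hypothetical; assumes a word initialized to
 * RW_LOCK_BIAS):
 *   if (rwread_trylock(&lk)) {            // word: RW_LOCK_BIAS-1
 *     if (rwconvert_trylock(&lk)) {       // word: 0, exclusive access
 *       ... write ...
 *       rwconvert_unlock(&lk);            // word: RW_LOCK_BIAS-1
 *     }
 *     rwread_unlock(&lk);                 // word: RW_LOCK_BIAS
 *   }
 */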
-static inline int rwconvert_trylock(volatile unsigned int *lock) {
+static inline int rwconvert_trylock(volatile int *lock) {
if (likely(atomic_sub_and_test((RW_LOCK_BIAS-1), lock))) {
return 1; // sole reader: read lock converted to a write lock
}