--- linux/include/asm-i386/spinlock.h.p	2007-04-02 00:38:49.000000000 -0400
+++ linux/include/asm-i386/spinlock.h	2007-04-02 00:40:18.000000000 -0400
@@ -29,69 +29,31 @@
 
 static inline int __raw_spin_is_locked(raw_spinlock_t *x)
 {
-	return *(volatile signed char *)(&(x)->slock) <= 0;
+	return unlikely(x->qhead != x->qtail);
 }
 
 static inline void __raw_spin_lock(raw_spinlock_t *lock)
 {
-	asm volatile("\n1:\t"
-		     LOCK_PREFIX " ; decb %0\n\t"
-		     "jns 3f\n"
-		     "2:\t"
-		     "rep;nop\n\t"
-		     "cmpb $0,%0\n\t"
-		     "jle 2b\n\t"
-		     "jmp 1b\n"
-		     "3:\n\t"
-		     : "+m" (lock->slock) : : "memory");
-}
+	unsigned short pos = 1;
 
-/*
- * It is easier for the lock validator if interrupts are not re-enabled
- * in the middle of a lock-acquire. This is a performance feature anyway
- * so we turn it off:
- *
- * NOTE: there's an irqs-on section here, which normally would have to be
- * irq-traced, but on CONFIG_TRACE_IRQFLAGS we never use this variant.
- */
-#ifndef CONFIG_PROVE_LOCKING
-static inline void __raw_spin_lock_flags(raw_spinlock_t *lock, unsigned long flags)
-{
-	asm volatile(
-		"\n1:\t"
-		LOCK_PREFIX " ; decb %[slock]\n\t"
-		"jns 5f\n"
-		"2:\t"
-		"testl $0x200, %[flags]\n\t"
-		"jz 4f\n\t"
-		STI_STRING "\n"
-		"3:\t"
-		"rep;nop\n\t"
-		"cmpb $0, %[slock]\n\t"
-		"jle 3b\n\t"
-		CLI_STRING "\n\t"
-		"jmp 1b\n"
-		"4:\t"
-		"rep;nop\n\t"
-		"cmpb $0, %[slock]\n\t"
-		"jg 1b\n\t"
-		"jmp 4b\n"
-		"5:\n\t"
-		: [slock] "+m" (lock->slock)
-		: [flags] "r" (flags)
-		  CLI_STI_INPUT_ARGS
-		: "memory" CLI_STI_CLOBBERS);
+	asm volatile(
+		LOCK_PREFIX "xaddw %0, %1\n\t"
+		"1:\n\t"
+		"cmpw %0, %2\n\t"
+		"je 2f\n\t"
+		"rep;nop\n\t"
+		"jmp 1b\n\t"
+		"2:\n\t"
+		: "+r" (pos), "+m" (lock->qhead)
+		: "m" (lock->qtail) : "memory");
 }
-#endif
+
+#define __raw_spin_lock_flags(a,b) __raw_spin_lock(a)
 
 static inline int __raw_spin_trylock(raw_spinlock_t *lock)
 {
-	char oldval;
-	asm volatile(
-		"xchgb %b0,%1"
-		:"=q" (oldval), "+m" (lock->slock)
-		:"0" (0) : "memory");
-	return oldval > 0;
+	unsigned short qtail = lock->qtail;
+	return likely(cmpxchg(&lock->qhead, qtail, qtail+1) == qtail);
 }
 
 /*
@@ -105,18 +67,14 @@ static inline int __raw_spin_trylock(raw
 
 static inline void __raw_spin_unlock(raw_spinlock_t *lock)
 {
-	asm volatile("movb $1,%0" : "+m" (lock->slock) :: "memory");
+	asm volatile("addw $1,%0" : "+m" (lock->qtail) :: "memory");
 }
 
 #else
 
 static inline void __raw_spin_unlock(raw_spinlock_t *lock)
 {
-	char oldval = 1;
-
-	asm volatile("xchgb %b0, %1"
-		: "=q" (oldval), "+m" (lock->slock)
-		: "0" (oldval) : "memory");
+	asm volatile(LOCK_PREFIX "addw $1,%0" : "+m" (lock->qtail) :: "memory");
 }
 
 #endif
--- linux/include/asm-i386/spinlock_types.h.p	2007-04-02 00:39:00.000000000 -0400
+++ linux/include/asm-i386/spinlock_types.h	2007-04-02 00:39:13.000000000 -0400
@@ -6,10 +6,11 @@
 #endif
 
 typedef struct {
-	unsigned int slock;
+	unsigned short qhead;
+	unsigned short qtail;
 } raw_spinlock_t;
 
-#define __RAW_SPIN_LOCK_UNLOCKED	{ 1 }
+#define __RAW_SPIN_LOCK_UNLOCKED	{ 0, 0 }
 
 typedef struct {
 	unsigned int lock;
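
For anyone reading the asm, here is a minimal user-space sketch of the
ticket-lock protocol the patch implements. It is an illustration only, not
the kernel code: the hand-written xaddw/addw instructions are replaced by
GCC's __atomic builtins, and the ticket_t, ticket_lock/ticket_trylock/
ticket_unlock and cpu_relax names are invented for the example.

#include <stdio.h>

typedef struct {
	unsigned short qhead;	/* next ticket to hand out */
	unsigned short qtail;	/* ticket now being served */
} ticket_t;

#define TICKET_UNLOCKED	{ 0, 0 }

static inline void cpu_relax(void)
{
	/* PAUSE, same as the "rep;nop" in the patch */
	__asm__ __volatile__("rep; nop" ::: "memory");
}

static void ticket_lock(ticket_t *lock)
{
	/* atomically take a ticket: fetch old qhead, bump it (the xaddw) */
	unsigned short pos = __atomic_fetch_add(&lock->qhead, 1, __ATOMIC_ACQUIRE);

	/* spin until the "now serving" counter reaches our ticket */
	while (__atomic_load_n(&lock->qtail, __ATOMIC_ACQUIRE) != pos)
		cpu_relax();
}

static int ticket_trylock(ticket_t *lock)
{
	unsigned short qtail = lock->qtail;

	/* take a ticket only if the lock looks free (qhead == qtail) */
	return __atomic_compare_exchange_n(&lock->qhead, &qtail, qtail + 1,
					   0, __ATOMIC_ACQUIRE, __ATOMIC_RELAXED);
}

static void ticket_unlock(ticket_t *lock)
{
	/* serve the next waiter (the addw on qtail) */
	__atomic_fetch_add(&lock->qtail, 1, __ATOMIC_RELEASE);
}

int main(void)
{
	ticket_t lock = TICKET_UNLOCKED;

	ticket_lock(&lock);
	printf("trylock while held: %d\n", ticket_trylock(&lock));	/* 0 */
	ticket_unlock(&lock);
	printf("trylock when free:  %d\n", ticket_trylock(&lock));	/* 1 */
	ticket_unlock(&lock);
	return 0;
}

Each CPU takes a ticket from qhead with one atomic add and spins until qtail
catches up, so waiters acquire the lock in strict FIFO order instead of all
racing to decrement the old lock byte; unlock is a single increment of qtail,
and the 16-bit counters wrap harmlessly as long as fewer than 65536 CPUs are
queued on one lock at once.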