Commit eb7ec47

---
r: 89023
b: refs/heads/master
c: d3bf60a
h: refs/heads/master
i:
  89021: 7e9363d
  89019: e8dce17
  89015: 6231961
  89007: e1ffcfe
  88991: 50db69a
  88959: cc0c740
v: v3
Joe Perches authored and Ingo Molnar committed Apr 17, 2008
1 parent 02b5aef commit eb7ec47
Showing 2 changed files with 51 additions and 56 deletions.
2 changes: 1 addition & 1 deletion [refs]
@@ -1,2 +1,2 @@
 ---
-refs/heads/master: ceb7ce1052a9087bd4752424f253b883ec5e1cec
+refs/heads/master: d3bf60a6e48c9a451cac345c0ad57552bb299992
105 changes: 50 additions & 55 deletions trunk/include/asm-x86/spinlock.h
@@ -82,7 +82,7 @@ static inline void __raw_spin_lock(raw_spinlock_t *lock)
 {
 	short inc = 0x0100;
 
-	__asm__ __volatile__ (
+	asm volatile (
 		LOCK_PREFIX "xaddw %w0, %1\n"
 		"1:\t"
 		"cmpb %h0, %b0\n\t"
@@ -92,9 +92,9 @@ static inline void __raw_spin_lock(raw_spinlock_t *lock)
 		/* don't need lfence here, because loads are in-order */
 		"jmp 1b\n"
 		"2:"
-		:"+Q" (inc), "+m" (lock->slock)
+		: "+Q" (inc), "+m" (lock->slock)
 		:
-		:"memory", "cc");
+		: "memory", "cc");
 }
 
 #define __raw_spin_lock_flags(lock, flags) __raw_spin_lock(lock)
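
Aside: the function touched above is a ticket lock. LOCK xaddw with inc = 0x0100 atomically draws a ticket from the high byte of slock, and the loop spins until the low "now serving" byte catches up. As a rough C sketch of those semantics only — GCC builtins and a hypothetical helper name, not code from this commit:

/* Illustrative sketch of the xaddw ticket-lock acquire; not kernel code. */
static inline void ticket_lock_sketch(unsigned short *slock)
{
	/* LOCK xaddw %w0, %1 with inc == 0x0100: draw a ticket. */
	unsigned short t = __sync_fetch_and_add(slock, 0x0100);
	unsigned char my_ticket   = t >> 8;	/* %h0 */
	unsigned char now_serving = t & 0xff;	/* %b0 */

	while (my_ticket != now_serving) {
		__builtin_ia32_pause();		/* rep ; nop */
		/* movb %1, %b0: re-read the low ("now serving") byte. */
		now_serving = *(volatile unsigned char *)slock;
	}
}
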
@@ -104,30 +104,28 @@ static inline int __raw_spin_trylock(raw_spinlock_t *lock)
 	int tmp;
 	short new;
 
-	asm volatile(
-		"movw %2,%w0\n\t"
-		"cmpb %h0,%b0\n\t"
-		"jne 1f\n\t"
-		"movw %w0,%w1\n\t"
-		"incb %h1\n\t"
-		"lock ; cmpxchgw %w1,%2\n\t"
-		"1:"
-		"sete %b1\n\t"
-		"movzbl %b1,%0\n\t"
-		:"=&a" (tmp), "=Q" (new), "+m" (lock->slock)
-		:
-		: "memory", "cc");
+	asm volatile("movw %2,%w0\n\t"
+		     "cmpb %h0,%b0\n\t"
+		     "jne 1f\n\t"
+		     "movw %w0,%w1\n\t"
+		     "incb %h1\n\t"
+		     "lock ; cmpxchgw %w1,%2\n\t"
+		     "1:"
+		     "sete %b1\n\t"
+		     "movzbl %b1,%0\n\t"
+		     : "=&a" (tmp), "=Q" (new), "+m" (lock->slock)
+		     :
+		     : "memory", "cc");
 
 	return tmp;
 }
 
 static inline void __raw_spin_unlock(raw_spinlock_t *lock)
 {
-	__asm__ __volatile__(
-		UNLOCK_LOCK_PREFIX "incb %0"
-		:"+m" (lock->slock)
-		:
-		:"memory", "cc");
+	asm volatile(UNLOCK_LOCK_PREFIX "incb %0"
+		     : "+m" (lock->slock)
+		     :
+		     : "memory", "cc");
 }
 #else
 static inline int __raw_spin_is_locked(raw_spinlock_t *lock)
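
Aside: the trylock restyled above attempts the acquire only when the lock appears free (ticket byte equal to owner byte), claiming it with LOCK cmpxchgw; unlock is a single incb on the owner byte (UNLOCK_LOCK_PREFIX is empty on most configurations). A C sketch of the trylock logic, again with a hypothetical name and GCC builtins rather than the commit's asm:

/* Illustrative sketch of the cmpxchgw-based trylock; not kernel code. */
static inline int ticket_trylock_sketch(unsigned short *slock)
{
	unsigned short old = *slock;		/* movw %2,%w0 */

	/* cmpb %h0,%b0 / jne 1f: busy if ticket and owner bytes differ. */
	if ((old >> 8) != (old & 0xff))
		return 0;

	/* incb %h1 then LOCK cmpxchgw: take the ticket iff unchanged. */
	return __sync_bool_compare_and_swap(slock, old,
					    (unsigned short)(old + 0x0100));
}
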
@@ -149,21 +147,20 @@ static inline void __raw_spin_lock(raw_spinlock_t *lock)
 	int inc = 0x00010000;
 	int tmp;
 
-	__asm__ __volatile__ (
-		"lock ; xaddl %0, %1\n"
-		"movzwl %w0, %2\n\t"
-		"shrl $16, %0\n\t"
-		"1:\t"
-		"cmpl %0, %2\n\t"
-		"je 2f\n\t"
-		"rep ; nop\n\t"
-		"movzwl %1, %2\n\t"
-		/* don't need lfence here, because loads are in-order */
-		"jmp 1b\n"
-		"2:"
-		:"+Q" (inc), "+m" (lock->slock), "=r" (tmp)
-		:
-		:"memory", "cc");
+	asm volatile("lock ; xaddl %0, %1\n"
+		     "movzwl %w0, %2\n\t"
+		     "shrl $16, %0\n\t"
+		     "1:\t"
+		     "cmpl %0, %2\n\t"
+		     "je 2f\n\t"
+		     "rep ; nop\n\t"
+		     "movzwl %1, %2\n\t"
+		     /* don't need lfence here, because loads are in-order */
+		     "jmp 1b\n"
+		     "2:"
+		     : "+Q" (inc), "+m" (lock->slock), "=r" (tmp)
+		     :
+		     : "memory", "cc");
 }
 
 #define __raw_spin_lock_flags(lock, flags) __raw_spin_lock(lock)
@@ -173,31 +170,29 @@ static inline int __raw_spin_trylock(raw_spinlock_t *lock)
 	int tmp;
 	int new;
 
-	asm volatile(
-		"movl %2,%0\n\t"
-		"movl %0,%1\n\t"
-		"roll $16, %0\n\t"
-		"cmpl %0,%1\n\t"
-		"jne 1f\n\t"
-		"addl $0x00010000, %1\n\t"
-		"lock ; cmpxchgl %1,%2\n\t"
-		"1:"
-		"sete %b1\n\t"
-		"movzbl %b1,%0\n\t"
-		:"=&a" (tmp), "=r" (new), "+m" (lock->slock)
-		:
-		: "memory", "cc");
+	asm volatile("movl %2,%0\n\t"
+		     "movl %0,%1\n\t"
+		     "roll $16, %0\n\t"
+		     "cmpl %0,%1\n\t"
+		     "jne 1f\n\t"
+		     "addl $0x00010000, %1\n\t"
+		     "lock ; cmpxchgl %1,%2\n\t"
+		     "1:"
+		     "sete %b1\n\t"
+		     "movzbl %b1,%0\n\t"
+		     : "=&a" (tmp), "=r" (new), "+m" (lock->slock)
+		     :
+		     : "memory", "cc");
 
 	return tmp;
 }
 
 static inline void __raw_spin_unlock(raw_spinlock_t *lock)
 {
-	__asm__ __volatile__(
-		UNLOCK_LOCK_PREFIX "incw %0"
-		:"+m" (lock->slock)
-		:
-		:"memory", "cc");
+	asm volatile(UNLOCK_LOCK_PREFIX "incw %0"
+		     : "+m" (lock->slock)
+		     :
+		     : "memory", "cc");
 }
 #endif

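Note that nothing changes functionally in this commit: in GNU C, asm volatile and __asm__ __volatile__ are interchangeable spellings of the same construct (the underscored forms exist for strict ISO modes in which asm is not a keyword, which the kernel's gnu dialect does not need), so the diff is purely a style cleanup plus whitespace around the constraint colons. For instance, under any GCC-compatible compiler the two helpers below (hypothetical names, not from this commit) compile identically:

/* Both spellings emit the same "rep ; nop" (pause) instruction. */
static inline void relax_a(void) { asm volatile("rep ; nop" ::: "memory"); }
static inline void relax_b(void) { __asm__ __volatile__("rep ; nop" ::: "memory"); }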