[PATCH] x86_64: Use int operations in spinlocks to support more than 128 CPUs spinning.

Pointed out by Eric Dumazet

Signed-off-by: Andi Kleen <ak@suse.de>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>
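
Background on the limit: the lock word was previously manipulated with byte-wide instructions (decb, cmpb, movb, xchgb), and each waiter entering __raw_spin_lock decrements it once before spinning. With more than 128 CPUs contending, a signed char underflows past -128 and wraps positive, so a waiter can believe it owns a lock that is still held. The standalone C program below (an editorial sketch, not part of the commit) demonstrates the wraparound:

/*
 * Editorial sketch, not kernel code: shows how a signed byte lock
 * word wraps once more than 128 CPUs spin on it.  The lock starts
 * at 1 (unlocked); each CPU entering the lock path executes
 * "lock ; decb %0" exactly once before spinning.
 */
#include <stdio.h>

int main(void)
{
	signed char slock = 1;		/* 1 == unlocked */

	slock--;			/* first CPU takes the lock: 1 -> 0 */

	/* 129 more CPUs arrive; each performs its one decb. */
	for (int cpu = 0; cpu < 129; cpu++)
		slock--;		/* 0, -1, ..., -128, then wraps */

	/*
	 * On two's-complement hardware -128 - 1 wraps to +127.  The
	 * spin test "cmpb $0,%0; jle 2b" now sees a positive value,
	 * retries the decb, and "acquires" a lock that is still held.
	 */
	printf("slock = %d\n", slock);	/* prints 127 */
	return 0;
}

Widening the lock word to a 32-bit int pushes the same wrap point out to roughly two billion simultaneous spinners.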
Andi Kleen authored and Linus Torvalds committed Nov 15, 2005
1 parent 8315eca commit 485832a
Showing 1 changed file with 6 additions and 6 deletions.
12 changes: 6 additions & 6 deletions include/asm-x86_64/spinlock.h
@@ -18,22 +18,22 @@
  */
 
 #define __raw_spin_is_locked(x) \
-		(*(volatile signed char *)(&(x)->slock) <= 0)
+		(*(volatile signed int *)(&(x)->slock) <= 0)
 
 #define __raw_spin_lock_string \
 	"\n1:\t" \
-	"lock ; decb %0\n\t" \
+	"lock ; decl %0\n\t" \
 	"js 2f\n" \
 	LOCK_SECTION_START("") \
 	"2:\t" \
 	"rep;nop\n\t" \
-	"cmpb $0,%0\n\t" \
+	"cmpl $0,%0\n\t" \
 	"jle 2b\n\t" \
 	"jmp 1b\n" \
 	LOCK_SECTION_END
 
 #define __raw_spin_unlock_string \
-	"movb $1,%0" \
+	"movl $1,%0" \
 		:"=m" (lock->slock) : : "memory"
 
 static inline void __raw_spin_lock(raw_spinlock_t *lock)
@@ -47,10 +47,10 @@ static inline void __raw_spin_lock(raw_spinlock_t *lock)

 static inline int __raw_spin_trylock(raw_spinlock_t *lock)
 {
-	char oldval;
+	int oldval;
 
 	__asm__ __volatile__(
-	"xchgb %b0,%1"
+	"xchgl %0,%1"
 	:"=q" (oldval), "=m" (lock->slock)
 	:"0" (0) : "memory");

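For reference, the post-patch trylock and unlock logic paraphrased in portable C11 atomics. This is a sketch: the type and function names are invented here, the kernel uses the raw inline assembly shown above, and the C11 store is stronger than the plain (unlocked) movl the kernel emits, which relies on x86 store ordering:

#include <stdatomic.h>

typedef struct { atomic_int slock; } sketch_spinlock_t;	/* 1 == unlocked */

/*
 * Mirrors "xchgl %0,%1" with oldval preloaded to 0: atomically store
 * 0 into the lock word and fetch its previous value.  A positive
 * previous value means the lock was free and now belongs to us.
 */
static inline int sketch_spin_trylock(sketch_spinlock_t *lock)
{
	int oldval = atomic_exchange(&lock->slock, 0);
	return oldval > 0;
}

/* Mirrors "movl $1,%0": releasing the lock is a store of 1. */
static inline void sketch_spin_unlock(sketch_spinlock_t *lock)
{
	atomic_store(&lock->slock, 1);
}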
