[PATCH] i386: Remove alternative_smp
The .fill causes miscompilations with some binutils versions.

Instead just patch the lock prefix in the lock constructs. That is the
majority of the cost and should be good enough.
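
For context, a rough sketch of the lock-prefix patching this change relies on: the LOCK_PREFIX macro records the address of each emitted lock byte in the .smp_locks section, and a uniprocessor kernel can simply overwrite those bytes in place. The symbol and helper names below are illustrative assumptions, not the actual code in arch/i386/kernel/alternative.c:

    /*
     * Illustrative sketch only, not the kernel's real implementation:
     * walk the table of addresses collected by LOCK_PREFIX in the
     * .smp_locks section and overwrite each recorded "lock" prefix
     * byte, leaving the locked instruction itself (decb, subl, ...)
     * untouched.
     */
    extern unsigned char *__smp_locks[];      /* assumed start symbol of .smp_locks */
    extern unsigned char *__smp_locks_end[];  /* assumed end symbol of .smp_locks */

    static void patch_out_lock_prefixes(void) /* hypothetical helper */
    {
            unsigned char **ptr;

            for (ptr = __smp_locks; ptr < __smp_locks_end; ptr++)
                    **ptr = 0x90;   /* e.g. replace the 0xf0 lock prefix with a one-byte NOP */
    }

Compared with alternative_smp(), which kept a full UP replacement copy of every lock sequence plus the problematic .fill padding, this only touches the single prefix byte.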

Cc: Gerd Hoffmann <kraxel@suse.de>
Signed-off-by: Andi Kleen <ak@suse.de>
Signed-off-by: Linus Torvalds <torvalds@osdl.org>
Andi Kleen authored and Linus Torvalds committed Aug 30, 2006
1 parent 841be8d commit 8c74932
Showing 3 changed files with 10 additions and 41 deletions.
include/asm-i386/alternative.h (0 additions, 20 deletions)
@@ -88,9 +88,6 @@ static inline void alternatives_smp_switch(int smp) {}
 /*
  * Alternative inline assembly for SMP.
  *
- * alternative_smp() takes two versions (SMP first, UP second) and is
- * for more complex stuff such as spinlocks.
- *
  * The LOCK_PREFIX macro defined here replaces the LOCK and
  * LOCK_PREFIX macros used everywhere in the source tree.
  *
@@ -110,21 +107,6 @@ static inline void alternatives_smp_switch(int smp) {}
  */
 
 #ifdef CONFIG_SMP
-#define alternative_smp(smpinstr, upinstr, args...) \
-        asm volatile ("661:\n\t" smpinstr "\n662:\n" \
-                ".section .smp_altinstructions,\"a\"\n" \
-                " .align 4\n" \
-                " .long 661b\n" /* label */ \
-                " .long 663f\n" /* new instruction */ \
-                " .byte " __stringify(X86_FEATURE_UP) "\n" \
-                " .byte 662b-661b\n" /* sourcelen */ \
-                " .byte 664f-663f\n" /* replacementlen */ \
-                ".previous\n" \
-                ".section .smp_altinstr_replacement,\"awx\"\n" \
-                "663:\n\t" upinstr "\n" /* replacement */ \
-                "664:\n\t.fill 662b-661b,1,0x42\n" /* space for original */ \
-                ".previous" : args)
-
 #define LOCK_PREFIX \
                 ".section .smp_locks,\"a\"\n" \
                 " .align 4\n" \
@@ -133,8 +115,6 @@ static inline void alternatives_smp_switch(int smp) {}
                 "661:\n\tlock; "
 
 #else /* ! CONFIG_SMP */
-#define alternative_smp(smpinstr, upinstr, args...) \
-        asm volatile (upinstr : args)
 #define LOCK_PREFIX ""
 #endif
 
include/asm-i386/rwlock.h (6 additions, 8 deletions)
@@ -21,22 +21,20 @@
 #define RW_LOCK_BIAS_STR "0x01000000"
 
 #define __build_read_lock_ptr(rw, helper) \
-        alternative_smp("lock; subl $1,(%0)\n\t" \
+        asm volatile(LOCK_PREFIX " ; subl $1,(%0)\n\t" \
                 "jns 1f\n" \
                 "call " helper "\n\t" \
-                "1:\n", \
-                "subl $1,(%0)\n\t", \
+                "1:\n" \
                 :"a" (rw) : "memory")
 
 #define __build_read_lock_const(rw, helper) \
-        alternative_smp("lock; subl $1,%0\n\t" \
+        asm volatile(LOCK_PREFIX " ; subl $1,%0\n\t" \
                 "jns 1f\n" \
                 "pushl %%eax\n\t" \
                 "leal %0,%%eax\n\t" \
                 "call " helper "\n\t" \
                 "popl %%eax\n\t" \
-                "1:\n", \
-                "subl $1,%0\n\t", \
+                "1:\n" : \
                 "+m" (*(volatile int *)rw) : : "memory")
 
 #define __build_read_lock(rw, helper) do { \
@@ -47,15 +45,15 @@
                 } while (0)
 
 #define __build_write_lock_ptr(rw, helper) \
-        alternative_smp("lock; subl $" RW_LOCK_BIAS_STR ",(%0)\n\t" \
+        asm volatile(LOCK_PREFIX " ; subl $" RW_LOCK_BIAS_STR ",(%0)\n\t" \
                 "jz 1f\n" \
                 "call " helper "\n\t" \
                 "1:\n", \
                 "subl $" RW_LOCK_BIAS_STR ",(%0)\n\t", \
                 :"a" (rw) : "memory")
 
 #define __build_write_lock_const(rw, helper) \
-        alternative_smp("lock; subl $" RW_LOCK_BIAS_STR ",%0\n\t" \
+        asm volatile(LOCK_PREFIX " ; subl $" RW_LOCK_BIAS_STR ",%0\n\t" \
                 "jz 1f\n" \
                 "pushl %%eax\n\t" \
                 "leal %0,%%eax\n\t" \
include/asm-i386/spinlock.h (4 additions, 13 deletions)
@@ -22,7 +22,7 @@
 
 #define __raw_spin_lock_string \
         "\n1:\t" \
-        "lock ; decb %0\n\t" \
+        LOCK_PREFIX " ; decb %0\n\t" \
         "jns 3f\n" \
         "2:\t" \
         "rep;nop\n\t" \
@@ -38,7 +38,7 @@
  */
 #define __raw_spin_lock_string_flags \
         "\n1:\t" \
-        "lock ; decb %0\n\t" \
+        LOCK_PREFIX " ; decb %0\n\t" \
         "jns 5f\n" \
         "2:\t" \
         "testl $0x200, %1\n\t" \
@@ -57,15 +57,9 @@
         "jmp 4b\n" \
         "5:\n\t"
 
-#define __raw_spin_lock_string_up \
-        "\n\tdecb %0"
-
 static inline void __raw_spin_lock(raw_spinlock_t *lock)
 {
-        alternative_smp(
-                __raw_spin_lock_string,
-                __raw_spin_lock_string_up,
-                "+m" (lock->slock) : : "memory");
+        asm(__raw_spin_lock_string : "+m" (lock->slock) : : "memory");
 }
 
 /*
@@ -76,10 +70,7 @@ static inline void __raw_spin_lock(raw_spinlock_t *lock)
 #ifndef CONFIG_PROVE_LOCKING
 static inline void __raw_spin_lock_flags(raw_spinlock_t *lock, unsigned long flags)
 {
-        alternative_smp(
-                __raw_spin_lock_string_flags,
-                __raw_spin_lock_string_up,
-                "+m" (lock->slock) : "r" (flags) : "memory");
+        asm(__raw_spin_lock_string_flags : "+m" (lock->slock) : "r" (flags) : "memory");
 }
 #endif
 
