[MIPS] Make support for weakly ordered LL/SC a config option.
None of the weakly ordered processors supported in-tree need this, but it seems
like this could change ...

Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
Ralf Baechle committed Jul 20, 2007
1 parent ed203da commit 17099b1
Showing 7 changed files with 59 additions and 38 deletions.
11 changes: 11 additions & 0 deletions arch/mips/Kconfig
@@ -1190,8 +1190,19 @@ config SYS_HAS_CPU_RM9000
config SYS_HAS_CPU_SB1
bool

#
# CPU may reorder R->R, R->W, W->R, W->W
# Reordering beyond LL and SC is handled in WEAK_REORDERING_BEYOND_LLSC
#
config WEAK_ORDERING
bool

#
# CPU may reorder reads and writes beyond LL/SC
# CPU may reorder R->LL, R->LL, W->LL, W->LL, R->SC, R->SC, W->SC, W->SC
#
config WEAK_REORDERING_BEYOND_LLSC
bool
endmenu

#
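For illustration, a processor that reorders accesses around LL/SC would enable the new symbol from its CPU option, alongside the existing WEAK_ORDERING select. The entry below is purely hypothetical (per the commit message, no in-tree processor currently needs WEAK_REORDERING_BEYOND_LLSC):

config CPU_HYPOTHETICAL
	bool "Hypothetical weakly ordered CPU"
	select WEAK_ORDERING
	select WEAK_REORDERING_BEYOND_LLSC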
33 changes: 17 additions & 16 deletions include/asm-mips/atomic.h
@@ -138,7 +138,7 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)
{
unsigned long result;

smp_mb();
smp_llsc_mb();

if (cpu_has_llsc && R10000_LLSC_WAR) {
unsigned long temp;
@@ -181,7 +181,7 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)
raw_local_irq_restore(flags);
}

smp_mb();
smp_llsc_mb();

return result;
}
@@ -190,7 +190,7 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
unsigned long result;

smp_mb();
smp_llsc_mb();

if (cpu_has_llsc && R10000_LLSC_WAR) {
unsigned long temp;
@@ -233,7 +233,7 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)
raw_local_irq_restore(flags);
}

smp_mb();
smp_llsc_mb();

return result;
}
@@ -250,7 +250,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
unsigned long result;

smp_mb();
smp_llsc_mb();

if (cpu_has_llsc && R10000_LLSC_WAR) {
unsigned long temp;
@@ -302,7 +302,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
raw_local_irq_restore(flags);
}

smp_mb();
smp_llsc_mb();

return result;
}
@@ -519,7 +519,7 @@ static __inline__ long atomic64_add_return(long i, atomic64_t * v)
{
unsigned long result;

smp_mb();
smp_llsc_mb();

if (cpu_has_llsc && R10000_LLSC_WAR) {
unsigned long temp;
@@ -562,7 +562,7 @@ static __inline__ long atomic64_add_return(long i, atomic64_t * v)
raw_local_irq_restore(flags);
}

smp_mb();
smp_llsc_mb();

return result;
}
@@ -571,7 +571,7 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
{
unsigned long result;

smp_mb();
smp_llsc_mb();

if (cpu_has_llsc && R10000_LLSC_WAR) {
unsigned long temp;
@@ -614,7 +614,7 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
raw_local_irq_restore(flags);
}

smp_mb();
smp_llsc_mb();

return result;
}
@@ -631,7 +631,7 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
unsigned long result;

smp_mb();
smp_llsc_mb();

if (cpu_has_llsc && R10000_LLSC_WAR) {
unsigned long temp;
@@ -683,7 +683,7 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
raw_local_irq_restore(flags);
}

smp_mb();
smp_llsc_mb();

return result;
}
@@ -791,10 +791,11 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
* atomic*_return operations are serializing but not the non-*_return
* versions.
*/
#define smp_mb__before_atomic_dec() smp_mb()
#define smp_mb__after_atomic_dec() smp_mb()
#define smp_mb__before_atomic_inc() smp_mb()
#define smp_mb__after_atomic_inc() smp_mb()
#define smp_mb__before_atomic_dec() smp_llsc_mb()
#define smp_mb__after_atomic_dec() smp_llsc_mb()
#define smp_mb__before_atomic_inc() smp_llsc_mb()
#define smp_mb__after_atomic_inc() smp_llsc_mb()

#include <asm-generic/atomic.h>

#endif /* _ASM_ATOMIC_H */
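To make the intent of the substitution concrete, here is a minimal sketch of how a *_return atomic brackets its LL/SC loop with the new barrier. It is modelled on the cpu_has_llsc branch shown in the hunks above, not quoted verbatim from the tree:

/*
 * Sketch only: smp_llsc_mb() expands to a real SYNC only when
 * CONFIG_WEAK_REORDERING_BEYOND_LLSC is selected; otherwise it is just
 * a compiler barrier, since LL/SC themselves order enough on such CPUs.
 */
static __inline__ int sketch_atomic_add_return(int i, atomic_t * v)
{
	unsigned long result, temp;

	smp_llsc_mb();		/* order earlier accesses before the LL */

	__asm__ __volatile__(
	"	.set	mips3				\n"
	"1:	ll	%1, %2	# load-linked counter	\n"
	"	addu	%0, %1, %3			\n"
	"	sc	%0, %2	# store-conditional	\n"
	"	beqz	%0, 1b	# retry if SC failed	\n"
	"	addu	%0, %1, %3	# recompute result	\n"
	"	.set	mips0				\n"
	: "=&r" (result), "=&r" (temp), "=m" (v->counter)
	: "Ir" (i), "m" (v->counter)
	: "memory");

	smp_llsc_mb();		/* order the SC before later accesses */

	return result;
}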
9 changes: 9 additions & 0 deletions include/asm-mips/barrier.h
@@ -121,6 +121,11 @@
#else
#define __WEAK_ORDERING_MB " \n"
#endif
#if defined(CONFIG_WEAK_REORDERING_BEYOND_LLSC) && defined(CONFIG_SMP)
#define __WEAK_LLSC_MB " sync \n"
#else
#define __WEAK_LLSC_MB " \n"
#endif

#define smp_mb() __asm__ __volatile__(__WEAK_ORDERING_MB : : :"memory")
#define smp_rmb() __asm__ __volatile__(__WEAK_ORDERING_MB : : :"memory")
@@ -129,4 +134,8 @@
#define set_mb(var, value) \
do { var = value; smp_mb(); } while (0)

#define smp_llsc_mb() __asm__ __volatile__(__WEAK_LLSC_MB : : :"memory")
#define smp_llsc_rmb() __asm__ __volatile__(__WEAK_LLSC_MB : : :"memory")
#define smp_llsc_wmb() __asm__ __volatile__(__WEAK_LLSC_MB : : :"memory")

#endif /* __ASM_BARRIER_H */
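Concretely, after preprocessing the new barrier reduces to one of two forms. The sketch below is not the literal header text; it is the equivalent of folding __WEAK_LLSC_MB into the smp_llsc_mb() definition added above:

#if defined(CONFIG_WEAK_REORDERING_BEYOND_LLSC) && defined(CONFIG_SMP)
/* Accesses may be reordered around LL/SC: emit a real SYNC. */
#define smp_llsc_mb()	__asm__ __volatile__("	sync	\n" : : : "memory")
#else
/* LL/SC order enough by themselves: only a compiler barrier remains. */
#define smp_llsc_mb()	__asm__ __volatile__("		\n" : : : "memory")
#endif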
10 changes: 5 additions & 5 deletions include/asm-mips/bitops.h
@@ -38,8 +38,8 @@
/*
* clear_bit() doesn't provide any barrier for the compiler.
*/
#define smp_mb__before_clear_bit() smp_mb()
#define smp_mb__after_clear_bit() smp_mb()
#define smp_mb__before_clear_bit() smp_llsc_mb()
#define smp_mb__after_clear_bit() smp_llsc_mb()

/*
* set_bit - Atomically set a bit in memory
@@ -289,7 +289,7 @@ static inline int test_and_set_bit(unsigned long nr,
raw_local_irq_restore(flags);
}

smp_mb();
smp_llsc_mb();

return res != 0;
}
@@ -377,7 +377,7 @@ static inline int test_and_clear_bit(unsigned long nr,
raw_local_irq_restore(flags);
}

smp_mb();
smp_llsc_mb();

return res != 0;
}
@@ -445,7 +445,7 @@ static inline int test_and_change_bit(unsigned long nr,
raw_local_irq_restore(flags);
}

smp_mb();
smp_llsc_mb();

return res != 0;
}
8 changes: 4 additions & 4 deletions include/asm-mips/futex.h
@@ -29,7 +29,7 @@
" .set mips3 \n" \
"2: sc $1, %2 \n" \
" beqzl $1, 1b \n" \
__WEAK_ORDERING_MB \
__WEAK_LLSC_MB \
"3: \n" \
" .set pop \n" \
" .set mips0 \n" \
@@ -55,7 +55,7 @@
" .set mips3 \n" \
"2: sc $1, %2 \n" \
" beqz $1, 1b \n" \
__WEAK_ORDERING_MB \
__WEAK_LLSC_MB \
"3: \n" \
" .set pop \n" \
" .set mips0 \n" \
@@ -152,7 +152,7 @@ futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval, int newval)
" .set mips3 \n"
"2: sc $1, %1 \n"
" beqzl $1, 1b \n"
__WEAK_ORDERING_MB
__WEAK_LLSC_MB
"3: \n"
" .set pop \n"
" .section .fixup,\"ax\" \n"
@@ -179,7 +179,7 @@ futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval, int newval)
" .set mips3 \n"
"2: sc $1, %1 \n"
" beqz $1, 1b \n"
__WEAK_ORDERING_MB
__WEAK_LLSC_MB
"3: \n"
" .set pop \n"
" .section .fixup,\"ax\" \n"
18 changes: 9 additions & 9 deletions include/asm-mips/spinlock.h
@@ -67,7 +67,7 @@ static inline void __raw_spin_lock(raw_spinlock_t *lock)
: "memory");
}

smp_mb();
smp_llsc_mb();
}

static inline void __raw_spin_unlock(raw_spinlock_t *lock)
@@ -118,7 +118,7 @@ static inline unsigned int __raw_spin_trylock(raw_spinlock_t *lock)
: "memory");
}

smp_mb();
smp_llsc_mb();

return res == 0;
}
@@ -183,7 +183,7 @@ static inline void __raw_read_lock(raw_rwlock_t *rw)
: "memory");
}

smp_mb();
smp_llsc_mb();
}

/* Note the use of sub, not subu which will make the kernel die with an
@@ -193,7 +193,7 @@ static inline void __raw_read_unlock(raw_rwlock_t *rw)
{
unsigned int tmp;

smp_mb();
smp_llsc_mb();

if (R10000_LLSC_WAR) {
__asm__ __volatile__(
@@ -262,7 +262,7 @@ static inline void __raw_write_lock(raw_rwlock_t *rw)
: "memory");
}

smp_mb();
smp_llsc_mb();
}

static inline void __raw_write_unlock(raw_rwlock_t *rw)
@@ -293,7 +293,7 @@ static inline int __raw_read_trylock(raw_rwlock_t *rw)
" .set reorder \n"
" beqzl %1, 1b \n"
" nop \n"
__WEAK_ORDERING_MB
__WEAK_LLSC_MB
" li %2, 1 \n"
"2: \n"
: "=m" (rw->lock), "=&r" (tmp), "=&r" (ret)
@@ -310,7 +310,7 @@ static inline int __raw_read_trylock(raw_rwlock_t *rw)
" beqz %1, 1b \n"
" nop \n"
" .set reorder \n"
__WEAK_ORDERING_MB
__WEAK_LLSC_MB
" li %2, 1 \n"
"2: \n"
: "=m" (rw->lock), "=&r" (tmp), "=&r" (ret)
@@ -336,7 +336,7 @@ static inline int __raw_write_trylock(raw_rwlock_t *rw)
" sc %1, %0 \n"
" beqzl %1, 1b \n"
" nop \n"
__WEAK_ORDERING_MB
__WEAK_LLSC_MB
" li %2, 1 \n"
" .set reorder \n"
"2: \n"
@@ -354,7 +354,7 @@ static inline int __raw_write_trylock(raw_rwlock_t *rw)
" beqz %1, 3f \n"
" li %2, 1 \n"
"2: \n"
__WEAK_ORDERING_MB
__WEAK_LLSC_MB
" .subsection 2 \n"
"3: b 1b \n"
" li %2, 0 \n"
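The spinlock and rwlock changes follow the same pattern as the atomics. As a rough sketch of where the barrier sits (the LL/SC loops are elided; llsc_acquire_loop() and llsc_release_decrement() are purely hypothetical stand-ins, not in-tree helpers):

static inline void sketch_spin_lock(raw_spinlock_t *lock)
{
	llsc_acquire_loop(lock);	/* hypothetical: spin with LL/SC until owned */
	smp_llsc_mb();			/* acquire: critical section stays after the SC */
}

static inline void sketch_read_unlock(raw_rwlock_t *rw)
{
	smp_llsc_mb();			/* release: critical section stays before the SC */
	llsc_release_decrement(rw);	/* hypothetical: LL/SC decrement of the reader count */
}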
8 changes: 4 additions & 4 deletions include/asm-mips/system.h
@@ -117,7 +117,7 @@ static inline unsigned long __xchg_u32(volatile int * m, unsigned int val)
raw_local_irq_restore(flags); /* implies memory barrier */
}

smp_mb();
smp_llsc_mb();

return retval;
}
@@ -165,7 +165,7 @@ static inline __u64 __xchg_u64(volatile __u64 * m, __u64 val)
raw_local_irq_restore(flags); /* implies memory barrier */
}

smp_mb();
smp_llsc_mb();

return retval;
}
@@ -246,7 +246,7 @@ static inline unsigned long __cmpxchg_u32(volatile int * m, unsigned long old,
raw_local_irq_restore(flags); /* implies memory barrier */
}

smp_mb();
smp_llsc_mb();

return retval;
}
@@ -352,7 +352,7 @@ static inline unsigned long __cmpxchg_u64(volatile int * m, unsigned long old,
raw_local_irq_restore(flags); /* implies memory barrier */
}

smp_mb();
smp_llsc_mb();

return retval;
}
