[SPARC64]: Implement atomic backoff.
When the cpu count is high and contention hits an atomic object, the
processors can synchronize such that some cpus continually get knocked
out and cannot complete the atomic update.

So implement an exponential backoff for the SMP case.

Signed-off-by: David S. Miller <davem@davemloft.net>
David S. Miller committed Oct 17, 2007
1 parent d85714d commit 24f287e
Showing 3 changed files with 76 additions and 20 deletions.
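Editorial note: every routine touched below follows the classic compare-and-swap retry loop, and the patch inserts a delay that doubles after each failed attempt. As a rough orientation, here is the same idea in C (a minimal sketch with hypothetical names, not kernel code; the real implementation is the SPARC assembly in the diffs below):

    #include <stdatomic.h>

    #define BACKOFF_LIMIT (4 * 1024)   /* mirrors the new asm-sparc64/backoff.h */

    /* Hypothetical C analogue of the patched loops: retry the CAS, and
     * after each failure spin for a delay that doubles up to a cap. */
    static void atomic_add_sketch(int inc, atomic_int *p)
    {
            int backoff = 1;                          /* BACKOFF_SETUP */
            int old = atomic_load(p);
            while (!atomic_compare_exchange_weak(p, &old, old + inc)) {
                    /* volatile keeps the delay loop from being optimized out */
                    for (volatile int i = backoff; i > 0; i--)
                            ;                         /* BACKOFF_SPIN delay */
                    if (backoff <= BACKOFF_LIMIT)
                            backoff <<= 1;            /* exponential growth */
            }
    }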
38 changes: 27 additions & 11 deletions arch/sparc64/lib/atomic.S
@@ -1,10 +1,10 @@
-/* $Id: atomic.S,v 1.4 2001/11/18 00:12:56 davem Exp $
- * atomic.S: These things are too big to do inline.
+/* atomic.S: These things are too big to do inline.
  *
- * Copyright (C) 1999 David S. Miller (davem@redhat.com)
+ * Copyright (C) 1999, 2007 David S. Miller (davem@davemloft.net)
  */
 
 #include <asm/asi.h>
+#include <asm/backoff.h>
 
 	.text

@@ -16,27 +16,31 @@
 	.globl	atomic_add
 	.type	atomic_add,#function
 atomic_add: /* %o0 = increment, %o1 = atomic_ptr */
+	BACKOFF_SETUP(%o2)
 1:	lduw	[%o1], %g1
 	add	%g1, %o0, %g7
 	cas	[%o1], %g1, %g7
 	cmp	%g1, %g7
-	bne,pn	%icc, 1b
+	bne,pn	%icc, 2f
 	 nop
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o2, %o3, 1b)
 	.size	atomic_add, .-atomic_add
 
 	.globl	atomic_sub
 	.type	atomic_sub,#function
 atomic_sub: /* %o0 = decrement, %o1 = atomic_ptr */
+	BACKOFF_SETUP(%o2)
 1:	lduw	[%o1], %g1
 	sub	%g1, %o0, %g7
 	cas	[%o1], %g1, %g7
 	cmp	%g1, %g7
-	bne,pn	%icc, 1b
+	bne,pn	%icc, 2f
 	 nop
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o2, %o3, 1b)
 	.size	atomic_sub, .-atomic_sub
 
 	/* On SMP we need to use memory barriers to ensure
@@ -60,89 +64,101 @@ atomic_sub: /* %o0 = decrement, %o1 = atomic_ptr */
 	.globl	atomic_add_ret
 	.type	atomic_add_ret,#function
 atomic_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
+	BACKOFF_SETUP(%o2)
 	ATOMIC_PRE_BARRIER
 1:	lduw	[%o1], %g1
 	add	%g1, %o0, %g7
 	cas	[%o1], %g1, %g7
 	cmp	%g1, %g7
-	bne,pn	%icc, 1b
+	bne,pn	%icc, 2f
 	 add	%g7, %o0, %g7
 	sra	%g7, 0, %o0
 	ATOMIC_POST_BARRIER
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o2, %o3, 1b)
 	.size	atomic_add_ret, .-atomic_add_ret
 
 	.globl	atomic_sub_ret
 	.type	atomic_sub_ret,#function
 atomic_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
+	BACKOFF_SETUP(%o2)
 	ATOMIC_PRE_BARRIER
 1:	lduw	[%o1], %g1
 	sub	%g1, %o0, %g7
 	cas	[%o1], %g1, %g7
 	cmp	%g1, %g7
-	bne,pn	%icc, 1b
+	bne,pn	%icc, 2f
 	 sub	%g7, %o0, %g7
 	sra	%g7, 0, %o0
 	ATOMIC_POST_BARRIER
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o2, %o3, 1b)
 	.size	atomic_sub_ret, .-atomic_sub_ret
 
 	.globl	atomic64_add
 	.type	atomic64_add,#function
 atomic64_add: /* %o0 = increment, %o1 = atomic_ptr */
+	BACKOFF_SETUP(%o2)
 1:	ldx	[%o1], %g1
 	add	%g1, %o0, %g7
 	casx	[%o1], %g1, %g7
 	cmp	%g1, %g7
-	bne,pn	%xcc, 1b
+	bne,pn	%xcc, 2f
 	 nop
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o2, %o3, 1b)
 	.size	atomic64_add, .-atomic64_add
 
 	.globl	atomic64_sub
 	.type	atomic64_sub,#function
 atomic64_sub: /* %o0 = decrement, %o1 = atomic_ptr */
+	BACKOFF_SETUP(%o2)
 1:	ldx	[%o1], %g1
 	sub	%g1, %o0, %g7
 	casx	[%o1], %g1, %g7
 	cmp	%g1, %g7
-	bne,pn	%xcc, 1b
+	bne,pn	%xcc, 2f
 	 nop
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o2, %o3, 1b)
 	.size	atomic64_sub, .-atomic64_sub
 
 	.globl	atomic64_add_ret
 	.type	atomic64_add_ret,#function
 atomic64_add_ret: /* %o0 = increment, %o1 = atomic_ptr */
+	BACKOFF_SETUP(%o2)
 	ATOMIC_PRE_BARRIER
 1:	ldx	[%o1], %g1
 	add	%g1, %o0, %g7
 	casx	[%o1], %g1, %g7
 	cmp	%g1, %g7
-	bne,pn	%xcc, 1b
+	bne,pn	%xcc, 2f
 	 add	%g7, %o0, %g7
 	mov	%g7, %o0
 	ATOMIC_POST_BARRIER
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o2, %o3, 1b)
 	.size	atomic64_add_ret, .-atomic64_add_ret
 
 	.globl	atomic64_sub_ret
 	.type	atomic64_sub_ret,#function
 atomic64_sub_ret: /* %o0 = decrement, %o1 = atomic_ptr */
+	BACKOFF_SETUP(%o2)
 	ATOMIC_PRE_BARRIER
 1:	ldx	[%o1], %g1
 	sub	%g1, %o0, %g7
 	casx	[%o1], %g1, %g7
 	cmp	%g1, %g7
-	bne,pn	%xcc, 1b
+	bne,pn	%xcc, 2f
 	 sub	%g7, %o0, %g7
 	mov	%g7, %o0
 	ATOMIC_POST_BARRIER
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o2, %o3, 1b)
 	.size	atomic64_sub_ret, .-atomic64_sub_ret
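Editorial note on the *_ret variants above: the new value is recomputed in the branch delay slot (the add/sub right after the bne), and the 32-bit versions then sign-extend it with "sra %g7, 0, %o0" before returning. In C terms (illustrative only):

    #include <stdint.h>

    /* sra %g7, 0, %o0 — an arithmetic right shift by zero, which on
     * sparc64 sign-extends the low 32 bits of the sum into a full
     * 64-bit register for the return value in %o0. */
    static int64_t sign_extend_ret(uint64_t sum)
    {
            return (int32_t)sum;
    }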
30 changes: 21 additions & 9 deletions arch/sparc64/lib/bitops.S
@@ -1,10 +1,10 @@
-/* $Id: bitops.S,v 1.3 2001/11/18 00:12:56 davem Exp $
- * bitops.S: Sparc64 atomic bit operations.
+/* bitops.S: Sparc64 atomic bit operations.
  *
- * Copyright (C) 2000 David S. Miller (davem@redhat.com)
+ * Copyright (C) 2000, 2007 David S. Miller (davem@davemloft.net)
  */
 
 #include <asm/asi.h>
+#include <asm/backoff.h>
 
 	.text

@@ -29,6 +29,7 @@
 	.globl	test_and_set_bit
 	.type	test_and_set_bit,#function
 test_and_set_bit:	/* %o0=nr, %o1=addr */
+	BACKOFF_SETUP(%o3)
 	BITOP_PRE_BARRIER
 	srlx	%o0, 6, %g1
 	mov	1, %o2
@@ -40,18 +41,20 @@ test_and_set_bit:	/* %o0=nr, %o1=addr */
 	or	%g7, %o2, %g1
 	casx	[%o1], %g7, %g1
 	cmp	%g7, %g1
-	bne,pn	%xcc, 1b
+	bne,pn	%xcc, 2f
 	 and	%g7, %o2, %g2
 	clr	%o0
 	movrne	%g2, 1, %o0
 	BITOP_POST_BARRIER
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o3, %o4, 1b)
 	.size	test_and_set_bit, .-test_and_set_bit
 
 	.globl	test_and_clear_bit
 	.type	test_and_clear_bit,#function
 test_and_clear_bit:	/* %o0=nr, %o1=addr */
+	BACKOFF_SETUP(%o3)
 	BITOP_PRE_BARRIER
 	srlx	%o0, 6, %g1
 	mov	1, %o2
@@ -63,18 +66,20 @@ test_and_clear_bit:	/* %o0=nr, %o1=addr */
 	andn	%g7, %o2, %g1
 	casx	[%o1], %g7, %g1
 	cmp	%g7, %g1
-	bne,pn	%xcc, 1b
+	bne,pn	%xcc, 2f
 	 and	%g7, %o2, %g2
 	clr	%o0
 	movrne	%g2, 1, %o0
 	BITOP_POST_BARRIER
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o3, %o4, 1b)
 	.size	test_and_clear_bit, .-test_and_clear_bit
 
 	.globl	test_and_change_bit
 	.type	test_and_change_bit,#function
 test_and_change_bit:	/* %o0=nr, %o1=addr */
+	BACKOFF_SETUP(%o3)
 	BITOP_PRE_BARRIER
 	srlx	%o0, 6, %g1
 	mov	1, %o2
@@ -86,18 +91,20 @@ test_and_change_bit:	/* %o0=nr, %o1=addr */
 	xor	%g7, %o2, %g1
 	casx	[%o1], %g7, %g1
 	cmp	%g7, %g1
-	bne,pn	%xcc, 1b
+	bne,pn	%xcc, 2f
 	 and	%g7, %o2, %g2
 	clr	%o0
 	movrne	%g2, 1, %o0
 	BITOP_POST_BARRIER
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o3, %o4, 1b)
 	.size	test_and_change_bit, .-test_and_change_bit
 
 	.globl	set_bit
 	.type	set_bit,#function
 set_bit:	/* %o0=nr, %o1=addr */
+	BACKOFF_SETUP(%o3)
 	srlx	%o0, 6, %g1
 	mov	1, %o2
 	sllx	%g1, 3, %g3
@@ -108,15 +115,17 @@ set_bit:	/* %o0=nr, %o1=addr */
 	or	%g7, %o2, %g1
 	casx	[%o1], %g7, %g1
 	cmp	%g7, %g1
-	bne,pn	%xcc, 1b
+	bne,pn	%xcc, 2f
 	 nop
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o3, %o4, 1b)
 	.size	set_bit, .-set_bit
 
 	.globl	clear_bit
 	.type	clear_bit,#function
 clear_bit:	/* %o0=nr, %o1=addr */
+	BACKOFF_SETUP(%o3)
 	srlx	%o0, 6, %g1
 	mov	1, %o2
 	sllx	%g1, 3, %g3
@@ -127,15 +136,17 @@ clear_bit:	/* %o0=nr, %o1=addr */
 	andn	%g7, %o2, %g1
 	casx	[%o1], %g7, %g1
 	cmp	%g7, %g1
-	bne,pn	%xcc, 1b
+	bne,pn	%xcc, 2f
 	 nop
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o3, %o4, 1b)
 	.size	clear_bit, .-clear_bit
 
 	.globl	change_bit
 	.type	change_bit,#function
 change_bit:	/* %o0=nr, %o1=addr */
+	BACKOFF_SETUP(%o3)
 	srlx	%o0, 6, %g1
 	mov	1, %o2
 	sllx	%g1, 3, %g3
@@ -146,8 +157,9 @@ change_bit:	/* %o0=nr, %o1=addr */
 	xor	%g7, %o2, %g1
 	casx	[%o1], %g7, %g1
 	cmp	%g7, %g1
-	bne,pn	%xcc, 1b
+	bne,pn	%xcc, 2f
 	 nop
 	retl
 	 nop
+2:	BACKOFF_SPIN(%o3, %o4, 1b)
 	.size	change_bit, .-change_bit
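Editorial note: all six bitops routines share the same index math ahead of the CAS loop — srlx %o0, 6 selects the 64-bit word containing bit nr, sllx %g1, 3 turns that word index into a byte offset, and the low six bits of nr build the mask. A C sketch of that decomposition (illustrative helper names):

    #include <stdint.h>

    /* Word containing bit nr: nr / 64 (srlx %o0, 6, %g1; the sllx by 3
     * converts the word index to a byte offset for the address add). */
    static uint64_t *bit_word(uint64_t *addr, unsigned long nr)
    {
            return addr + (nr >> 6);
    }

    /* Mask for bit nr within its word: 1 << (nr % 64). */
    static uint64_t bit_mask(unsigned long nr)
    {
            return (uint64_t)1 << (nr & 63);
    }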
28 changes: 28 additions & 0 deletions include/asm-sparc64/backoff.h
@@ -0,0 +1,28 @@
+#ifndef _SPARC64_BACKOFF_H
+#define _SPARC64_BACKOFF_H
+
+#define BACKOFF_LIMIT	(4 * 1024)
+
+#ifdef CONFIG_SMP
+
+#define BACKOFF_SETUP(reg)	\
+	mov	1, reg
+
+#define BACKOFF_SPIN(reg, tmp, label)	\
+	mov	reg, tmp; \
+88:	brnz,pt	tmp, 88b; \
+	 sub	tmp, 1, tmp; \
+	cmp	reg, BACKOFF_LIMIT; \
+	bg,pn	%xcc, label; \
+	 nop; \
+	ba,pt	%xcc, label; \
+	 sllx	reg, 1, reg;
+
+#else
+
+#define BACKOFF_SETUP(reg)
+#define BACKOFF_SPIN(reg, tmp, label)
+
+#endif
+
+#endif /* _SPARC64_BACKOFF_H */
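Editorial note: read as C, BACKOFF_SPIN spins for "reg" iterations (the 88: local-label loop counts tmp down to zero), then doubles reg unless it already exceeds BACKOFF_LIMIT, and in either case branches back to the caller's retry label. A transliteration (sketch only; the delay slots make the assembly ordering slightly different):

    #define BACKOFF_LIMIT (4 * 1024)

    static unsigned long backoff_spin(unsigned long reg)
    {
            volatile unsigned long tmp = reg;
            while (tmp != 0)             /* 88: brnz,pt tmp, 88b          */
                    tmp--;               /*  sub tmp, 1, tmp (delay slot) */
            if (reg > BACKOFF_LIMIT)     /* cmp reg, BACKOFF_LIMIT        */
                    return reg;          /* bg,pn %xcc, label — no growth */
            return reg << 1;             /* sllx reg, 1, reg, then branch */
    }

On UP kernels both macros expand to nothing, so the cost is confined to CONFIG_SMP builds.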
