---
r: 54795
b: refs/heads/master
c: 2856f5e
h: refs/heads/master
i:
  54793: a2d08d8
  54791: 0ccd4b5
v: v3
Mathieu Desnoyers authored and Linus Torvalds committed May 8, 2007
1 parent effa4ea commit da18f32
Showing 19 changed files with 361 additions and 283 deletions.
2 changes: 1 addition & 1 deletion [refs]
@@ -1,2 +1,2 @@
---
-refs/heads/master: 79d365a306c3af53d8a732fec79b76c0b285d816
+refs/heads/master: 2856f5e31c1413bf6e4f1371e07e17078a5fee5e
59 changes: 31 additions & 28 deletions trunk/include/asm-alpha/atomic.h
@@ -2,6 +2,7 @@
#define _ALPHA_ATOMIC_H

#include <asm/barrier.h>
+#include <asm/system.h>

/*
* Atomic operations that C can't guarantee us. Useful for
@@ -190,20 +191,21 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
* Atomically adds @a to @v, so long as it was not @u.
* Returns non-zero if @v was not @u, and zero otherwise.
*/
-#define atomic_add_unless(v, a, u) \
-({ \
-	__typeof__((v)->counter) c, old; \
-	c = atomic_read(v); \
-	for (;;) { \
-		if (unlikely(c == (u))) \
-			break; \
-		old = atomic_cmpxchg((v), c, c + (a)); \
-		if (likely(old == c)) \
-			break; \
-		c = old; \
-	} \
-	c != (u); \
-})
+static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
+{
+	int c, old;
+	c = atomic_read(v);
+	for (;;) {
+		if (unlikely(c == (u)))
+			break;
+		old = atomic_cmpxchg((v), c, c + (a));
+		if (likely(old == c))
+			break;
+		c = old;
+	}
+	return c != (u);
+}

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

/**
@@ -215,20 +217,21 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
* Atomically adds @a to @v, so long as it was not @u.
* Returns non-zero if @v was not @u, and zero otherwise.
*/
-#define atomic64_add_unless(v, a, u) \
-({ \
-	__typeof__((v)->counter) c, old; \
-	c = atomic64_read(v); \
-	for (;;) { \
-		if (unlikely(c == (u))) \
-			break; \
-		old = atomic64_cmpxchg((v), c, c + (a)); \
-		if (likely(old == c)) \
-			break; \
-		c = old; \
-	} \
-	c != (u); \
-})
+static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
+{
+	long c, old;
+	c = atomic64_read(v);
+	for (;;) {
+		if (unlikely(c == (u)))
+			break;
+		old = atomic64_cmpxchg((v), c, c + (a));
+		if (likely(old == c))
+			break;
+		c = old;
+	}
+	return c != (u);
+}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

#define atomic_add_negative(a, v) (atomic_add_return((a), (v)) < 0)
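For illustration (the sketch below is not part of the diff): rewriting atomic_add_unless() as an inline function gives the compiler a real prototype to check callers against, whereas the old statement-expression macro accepted any lvalue with a counter field. A minimal usage sketch, assuming a hypothetical refcounted object:

	#include <asm/atomic.h>

	struct obj {
		atomic_t refcount;
	};

	/* Take a reference only while the object is still live; returns
	 * non-zero on success.  atomic_inc_not_zero() is the
	 * atomic_add_unless((v), 1, 0) shorthand defined above. */
	static inline int obj_get(struct obj *o)
	{
		return atomic_inc_not_zero(&o->refcount);
	}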
1 change: 1 addition & 0 deletions trunk/include/asm-arm/atomic.h
@@ -12,6 +12,7 @@
#define __ASM_ARM_ATOMIC_H

#include <linux/compiler.h>
+#include <asm/system.h>

typedef struct { volatile int counter; } atomic_t;

1 change: 0 additions & 1 deletion trunk/include/asm-arm26/atomic.h
@@ -20,7 +20,6 @@
#ifndef __ASM_ARM_ATOMIC_H
#define __ASM_ARM_ATOMIC_H

-
#ifdef CONFIG_SMP
#error SMP is NOT supported
#endif
91 changes: 15 additions & 76 deletions trunk/include/asm-frv/atomic.h
@@ -16,6 +16,7 @@

#include <linux/types.h>
#include <asm/spr-regs.h>
+#include <asm/system.h>

#ifdef CONFIG_SMP
#error not SMP safe
@@ -258,85 +259,23 @@ extern uint32_t __xchg_32(uint32_t i, volatile void *v);

#define tas(ptr) (xchg((ptr), 1))

-/*****************************************************************************/
-/*
- * compare and conditionally exchange value with memory
- * - if (*ptr == test) then orig = *ptr; *ptr = test;
- * - if (*ptr != test) then orig = *ptr;
- */
-#ifndef CONFIG_FRV_OUTOFLINE_ATOMIC_OPS
-
-#define cmpxchg(ptr, test, new) \
-({ \
-	__typeof__(ptr) __xg_ptr = (ptr); \
-	__typeof__(*(ptr)) __xg_orig, __xg_tmp; \
-	__typeof__(*(ptr)) __xg_test = (test); \
-	__typeof__(*(ptr)) __xg_new = (new); \
-	\
-	switch (sizeof(__xg_orig)) { \
-	case 4: \
-		asm volatile( \
-			"0: \n" \
-			" orcc gr0,gr0,gr0,icc3 \n" \
-			" ckeq icc3,cc7 \n" \
-			" ld.p %M0,%1 \n" \
-			" orcr cc7,cc7,cc3 \n" \
-			" sub%I4cc %1,%4,%2,icc0 \n" \
-			" bne icc0,#0,1f \n" \
-			" cst.p %3,%M0 ,cc3,#1 \n" \
-			" corcc gr29,gr29,gr0 ,cc3,#1 \n" \
-			" beq icc3,#0,0b \n" \
-			"1: \n" \
-			: "+U"(*__xg_ptr), "=&r"(__xg_orig), "=&r"(__xg_tmp) \
-			: "r"(__xg_new), "NPr"(__xg_test) \
-			: "memory", "cc7", "cc3", "icc3", "icc0" \
-			); \
-		break; \
-	\
-	default: \
-		__xg_orig = 0; \
-		asm volatile("break"); \
-		break; \
-	} \
-	\
-	__xg_orig; \
-})
-
-#else
-
-extern uint32_t __cmpxchg_32(uint32_t *v, uint32_t test, uint32_t new);
-
-#define cmpxchg(ptr, test, new) \
-({ \
-	__typeof__(ptr) __xg_ptr = (ptr); \
-	__typeof__(*(ptr)) __xg_orig; \
-	__typeof__(*(ptr)) __xg_test = (test); \
-	__typeof__(*(ptr)) __xg_new = (new); \
-	\
-	switch (sizeof(__xg_orig)) { \
-	case 4: __xg_orig = __cmpxchg_32(__xg_ptr, __xg_test, __xg_new); break; \
-	default: \
-		__xg_orig = 0; \
-		asm volatile("break"); \
-		break; \
-	} \
-	\
-	__xg_orig; \
-})
-
-#endif

#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

-#define atomic_add_unless(v, a, u) \
-({ \
-	int c, old; \
-	c = atomic_read(v); \
-	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
-		c = old; \
-	c != (u); \
-})
+static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
+{
+	int c, old;
+	c = atomic_read(v);
+	for (;;) {
+		if (unlikely(c == (u)))
+			break;
+		old = atomic_cmpxchg((v), c, c + (a));
+		if (likely(old == c))
+			break;
+		c = old;
+	}
+	return c != (u);
+}

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

70 changes: 69 additions & 1 deletion trunk/include/asm-frv/system.h
@@ -13,7 +13,6 @@
#define _ASM_SYSTEM_H

#include <linux/linkage.h>
-#include <asm/atomic.h>

struct thread_struct;

@@ -197,4 +196,73 @@ extern void free_initmem(void);

#define arch_align_stack(x) (x)

+/*****************************************************************************/
+/*
+ * compare and conditionally exchange value with memory
+ * - if (*ptr == test) then orig = *ptr; *ptr = test;
+ * - if (*ptr != test) then orig = *ptr;
+ */
+#ifndef CONFIG_FRV_OUTOFLINE_ATOMIC_OPS
+
+#define cmpxchg(ptr, test, new) \
+({ \
+	__typeof__(ptr) __xg_ptr = (ptr); \
+	__typeof__(*(ptr)) __xg_orig, __xg_tmp; \
+	__typeof__(*(ptr)) __xg_test = (test); \
+	__typeof__(*(ptr)) __xg_new = (new); \
+	\
+	switch (sizeof(__xg_orig)) { \
+	case 4: \
+		asm volatile( \
+			"0: \n" \
+			" orcc gr0,gr0,gr0,icc3 \n" \
+			" ckeq icc3,cc7 \n" \
+			" ld.p %M0,%1 \n" \
+			" orcr cc7,cc7,cc3 \n" \
+			" sub%I4cc %1,%4,%2,icc0 \n" \
+			" bne icc0,#0,1f \n" \
+			" cst.p %3,%M0 ,cc3,#1 \n" \
+			" corcc gr29,gr29,gr0 ,cc3,#1 \n" \
+			" beq icc3,#0,0b \n" \
+			"1: \n" \
+			: "+U"(*__xg_ptr), "=&r"(__xg_orig), "=&r"(__xg_tmp) \
+			: "r"(__xg_new), "NPr"(__xg_test) \
+			: "memory", "cc7", "cc3", "icc3", "icc0" \
+			); \
+		break; \
+	\
+	default: \
+		__xg_orig = 0; \
+		asm volatile("break"); \
+		break; \
+	} \
+	\
+	__xg_orig; \
+})
+
+#else
+
+extern uint32_t __cmpxchg_32(uint32_t *v, uint32_t test, uint32_t new);
+
+#define cmpxchg(ptr, test, new) \
+({ \
+	__typeof__(ptr) __xg_ptr = (ptr); \
+	__typeof__(*(ptr)) __xg_orig; \
+	__typeof__(*(ptr)) __xg_test = (test); \
+	__typeof__(*(ptr)) __xg_new = (new); \
+	\
+	switch (sizeof(__xg_orig)) { \
+	case 4: __xg_orig = __cmpxchg_32(__xg_ptr, __xg_test, __xg_new); break; \
+	default: \
+		__xg_orig = 0; \
+		asm volatile("break"); \
+		break; \
+	} \
+	\
+	__xg_orig; \
+})
+
+#endif


#endif /* _ASM_SYSTEM_H */
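This is the same cmpxchg() block that the hunk above deletes from trunk/include/asm-frv/atomic.h: hosting it in system.h lets atomic.h take cmpxchg() through #include <asm/system.h> (which the frv atomic.h hunk adds) while system.h no longer includes atomic.h, breaking the include cycle. As a sketch of the compare-and-exchange contract described in the comment (a hypothetical helper, not from this commit): cmpxchg() returns the prior value of *ptr and stores new only when that prior value equalled test, so a retry loop compares the return value against the value it expected:

	#include <linux/types.h>

	/* Hypothetical saturating increment built on cmpxchg(): never
	 * wraps past 0xffffffff.  Retries whenever another CPU changed
	 * the counter between the load and the cmpxchg(). */
	static inline void sat_inc(uint32_t *counter)
	{
		uint32_t old;

		do {
			old = *counter;
			if (old == 0xffffffffU)	/* already saturated */
				return;
		} while (cmpxchg(counter, old, old + 1) != old);
	}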
17 changes: 12 additions & 5 deletions trunk/include/asm-generic/atomic.h
@@ -9,7 +9,6 @@
*/

#include <asm/types.h>
-#include <asm/system.h>

/*
* Suppport for atomic_long_t
@@ -123,8 +122,12 @@ static inline long atomic_long_dec_return(atomic_long_t *l)
return (long)atomic64_dec_return(v);
}

-#define atomic_long_add_unless(l, a, u) \
-	atomic64_add_unless((atomic64_t *)(l), (a), (u))
+static inline long atomic_long_add_unless(atomic_long_t *l, long a, long u)
+{
+	atomic64_t *v = (atomic64_t *)l;
+
+	return (long)atomic64_add_unless(v, a, u);
+}

#define atomic_long_inc_not_zero(l) atomic64_inc_not_zero((atomic64_t *)(l))

@@ -236,8 +239,12 @@ static inline long atomic_long_dec_return(atomic_long_t *l)
return (long)atomic_dec_return(v);
}

-#define atomic_long_add_unless(l, a, u) \
-	atomic_add_unless((atomic_t *)(l), (a), (u))
+static inline long atomic_long_add_unless(atomic_long_t *l, long a, long u)
+{
+	atomic_t *v = (atomic_t *)l;
+
+	return (long)atomic_add_unless(v, a, u);
+}

#define atomic_long_inc_not_zero(l) atomic_inc_not_zero((atomic_t *)(l))

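Both variants of atomic_long_add_unless() cast to the native-width atomic type, so the same caller code works on 32-bit and 64-bit builds; which branch is compiled depends on BITS_PER_LONG. A brief usage sketch (the pinned-page counter and its -1 sentinel are hypothetical):

	#include <asm-generic/atomic.h>	/* normally reached via <asm/atomic.h> */

	static atomic_long_t pages_pinned = ATOMIC_LONG_INIT(0);

	/* Account one more pinned page unless the counter holds the -1
	 * sentinel; returns non-zero when the increment happened. */
	static inline long pin_account(void)
	{
		return atomic_long_add_unless(&pages_pinned, 1, -1);
	}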
29 changes: 15 additions & 14 deletions trunk/include/asm-i386/atomic.h
@@ -219,20 +219,21 @@ static __inline__ int atomic_sub_return(int i, atomic_t *v)
* Atomically adds @a to @v, so long as @v was not already @u.
* Returns non-zero if @v was not @u, and zero otherwise.
*/
-#define atomic_add_unless(v, a, u) \
-({ \
-	__typeof__((v)->counter) c, old; \
-	c = atomic_read(v); \
-	for (;;) { \
-		if (unlikely(c == (u))) \
-			break; \
-		old = atomic_cmpxchg((v), c, c + (a)); \
-		if (likely(old == c)) \
-			break; \
-		c = old; \
-	} \
-	c != (u); \
-})
+static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
+{
+	int c, old;
+	c = atomic_read(v);
+	for (;;) {
+		if (unlikely(c == (u)))
+			break;
+		old = atomic_cmpxchg((v), c, c + (a));
+		if (likely(old == c))
+			break;
+		c = old;
+	}
+	return c != (u);
+}

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

#define atomic_inc_return(v) (atomic_add_return(1,v))
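One more illustrative point, not stated in the diff: the inline function evaluates each argument exactly once, while the old statement-expression macro expanded (u) both in the c == (u) test and in the final c != (u) result. A hypothetical side-effecting argument shows the difference:

	#include <asm/atomic.h>

	/* With the macro form, 'u++' would have been evaluated at every
	 * expansion of (u); passed to the inline function it is
	 * evaluated exactly once, before the call. */
	static int add_unless_once(atomic_t *v)
	{
		int u = 0;

		return atomic_add_unless(v, 1, u++);
	}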
