locking/atomic: make atomic*_{cmp,}xchg optional
Most architectures define the atomic/atomic64 xchg and cmpxchg
operations in terms of arch_xchg and arch_cmpxchg respectively.

Add fallbacks for these cases and remove the trivial cases from arch
code. On some architectures the existing definitions are kept, as these
are used to build other arch_atomic*() operations.

Signed-off-by: Mark Rutland <mark.rutland@arm.com>
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Reviewed-by: Kees Cook <keescook@chromium.org>
Link: https://lore.kernel.org/r/20230605070124.3741859-5-mark.rutland@arm.com
Mark Rutland authored and Peter Zijlstra committed Jun 5, 2023
1 parent a7bafa7 commit d12157e
Showing 23 changed files with 179 additions and 265 deletions.
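
For context, the fallback mechanism this series relies on works roughly as follows: the generic atomic headers only emit a definition when the architecture has not already claimed the operation by #define-ing it to itself. Below is a minimal sketch of the int-sized case, assuming the usual kernel atomic headers; it illustrates the pattern and is not the generated fallback code verbatim:

#ifndef arch_atomic_xchg
/* Fallback: build the atomic_t xchg from the plain arch_xchg() on the counter */
static __always_inline int arch_atomic_xchg(atomic_t *v, int new)
{
	return arch_xchg(&v->counter, new);
}
#define arch_atomic_xchg arch_atomic_xchg
#endif

#ifndef arch_atomic_cmpxchg
/* Fallback: build the atomic_t cmpxchg from arch_cmpxchg() on the counter */
static __always_inline int arch_atomic_cmpxchg(atomic_t *v, int old, int new)
{
	return arch_cmpxchg(&v->counter, old, new);
}
#define arch_atomic_cmpxchg arch_atomic_cmpxchg
#endif

This is also why the hunks below that keep a non-trivial implementation (arc's atomic64, arm, m68k) add a self-referential #define after it: that define is what tells the fallback machinery the operation is already provided.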
10 changes: 0 additions & 10 deletions arch/alpha/include/asm/atomic.h
@@ -200,16 +200,6 @@ ATOMIC_OPS(xor, xor)
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

#define arch_atomic64_cmpxchg(v, old, new) \
(arch_cmpxchg(&((v)->counter), old, new))
#define arch_atomic64_xchg(v, new) \
(arch_xchg(&((v)->counter), new))

#define arch_atomic_cmpxchg(v, old, new) \
(arch_cmpxchg(&((v)->counter), old, new))
#define arch_atomic_xchg(v, new) \
(arch_xchg(&((v)->counter), new))

/**
* arch_atomic_fetch_add_unless - add unless the number is a given value
* @v: pointer of type atomic_t
24 changes: 0 additions & 24 deletions arch/arc/include/asm/atomic.h
@@ -22,30 +22,6 @@
#include <asm/atomic-spinlock.h>
#endif

#define arch_atomic_cmpxchg(v, o, n) \
({ \
arch_cmpxchg(&((v)->counter), (o), (n)); \
})

#ifdef arch_cmpxchg_relaxed
#define arch_atomic_cmpxchg_relaxed(v, o, n) \
({ \
arch_cmpxchg_relaxed(&((v)->counter), (o), (n)); \
})
#endif

#define arch_atomic_xchg(v, n) \
({ \
arch_xchg(&((v)->counter), (n)); \
})

#ifdef arch_xchg_relaxed
#define arch_atomic_xchg_relaxed(v, n) \
({ \
arch_xchg_relaxed(&((v)->counter), (n)); \
})
#endif

/*
* 64-bit atomics
*/
2 changes: 2 additions & 0 deletions arch/arc/include/asm/atomic64-arcv2.h
@@ -159,6 +159,7 @@ arch_atomic64_cmpxchg(atomic64_t *ptr, s64 expected, s64 new)

return prev;
}
#define arch_atomic64_cmpxchg arch_atomic64_cmpxchg

static inline s64 arch_atomic64_xchg(atomic64_t *ptr, s64 new)
{
@@ -179,6 +180,7 @@ static inline s64 arch_atomic64_xchg(atomic64_t *ptr, s64 new)

return prev;
}
#define arch_atomic64_xchg arch_atomic64_xchg

/**
* arch_atomic64_dec_if_positive - decrement by 1 if old value positive
3 changes: 1 addition & 2 deletions arch/arm/include/asm/atomic.h
@@ -210,6 +210,7 @@ static inline int arch_atomic_cmpxchg(atomic_t *v, int old, int new)

return ret;
}
#define arch_atomic_cmpxchg arch_atomic_cmpxchg

#define arch_atomic_fetch_andnot arch_atomic_fetch_andnot

@@ -240,8 +241,6 @@ ATOMIC_OPS(xor, ^=, eor)
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

#define arch_atomic_xchg(v, new) (arch_xchg(&((v)->counter), new))

#ifndef CONFIG_GENERIC_ATOMIC64
typedef struct {
s64 counter;
28 changes: 0 additions & 28 deletions arch/arm64/include/asm/atomic.h
@@ -142,24 +142,6 @@ static __always_inline long arch_atomic64_dec_if_positive(atomic64_t *v)
#define arch_atomic_fetch_xor_release arch_atomic_fetch_xor_release
#define arch_atomic_fetch_xor arch_atomic_fetch_xor

#define arch_atomic_xchg_relaxed(v, new) \
arch_xchg_relaxed(&((v)->counter), (new))
#define arch_atomic_xchg_acquire(v, new) \
arch_xchg_acquire(&((v)->counter), (new))
#define arch_atomic_xchg_release(v, new) \
arch_xchg_release(&((v)->counter), (new))
#define arch_atomic_xchg(v, new) \
arch_xchg(&((v)->counter), (new))

#define arch_atomic_cmpxchg_relaxed(v, old, new) \
arch_cmpxchg_relaxed(&((v)->counter), (old), (new))
#define arch_atomic_cmpxchg_acquire(v, old, new) \
arch_cmpxchg_acquire(&((v)->counter), (old), (new))
#define arch_atomic_cmpxchg_release(v, old, new) \
arch_cmpxchg_release(&((v)->counter), (old), (new))
#define arch_atomic_cmpxchg(v, old, new) \
arch_cmpxchg(&((v)->counter), (old), (new))

#define arch_atomic_andnot arch_atomic_andnot

/*
@@ -209,16 +191,6 @@ static __always_inline long arch_atomic64_dec_if_positive(atomic64_t *v)
#define arch_atomic64_fetch_xor_release arch_atomic64_fetch_xor_release
#define arch_atomic64_fetch_xor arch_atomic64_fetch_xor

#define arch_atomic64_xchg_relaxed arch_atomic_xchg_relaxed
#define arch_atomic64_xchg_acquire arch_atomic_xchg_acquire
#define arch_atomic64_xchg_release arch_atomic_xchg_release
#define arch_atomic64_xchg arch_atomic_xchg

#define arch_atomic64_cmpxchg_relaxed arch_atomic_cmpxchg_relaxed
#define arch_atomic64_cmpxchg_acquire arch_atomic_cmpxchg_acquire
#define arch_atomic64_cmpxchg_release arch_atomic_cmpxchg_release
#define arch_atomic64_cmpxchg arch_atomic_cmpxchg

#define arch_atomic64_andnot arch_atomic64_andnot

#define arch_atomic64_dec_if_positive arch_atomic64_dec_if_positive
35 changes: 0 additions & 35 deletions arch/csky/include/asm/atomic.h
@@ -195,41 +195,6 @@ arch_atomic_dec_if_positive(atomic_t *v)
}
#define arch_atomic_dec_if_positive arch_atomic_dec_if_positive

#define ATOMIC_OP() \
static __always_inline \
int arch_atomic_xchg_relaxed(atomic_t *v, int n) \
{ \
return __xchg_relaxed(n, &(v->counter), 4); \
} \
static __always_inline \
int arch_atomic_cmpxchg_relaxed(atomic_t *v, int o, int n) \
{ \
return __cmpxchg_relaxed(&(v->counter), o, n, 4); \
} \
static __always_inline \
int arch_atomic_cmpxchg_acquire(atomic_t *v, int o, int n) \
{ \
return __cmpxchg_acquire(&(v->counter), o, n, 4); \
} \
static __always_inline \
int arch_atomic_cmpxchg(atomic_t *v, int o, int n) \
{ \
return __cmpxchg(&(v->counter), o, n, 4); \
}

#define ATOMIC_OPS() \
ATOMIC_OP()

ATOMIC_OPS()

#define arch_atomic_xchg_relaxed arch_atomic_xchg_relaxed
#define arch_atomic_cmpxchg_relaxed arch_atomic_cmpxchg_relaxed
#define arch_atomic_cmpxchg_acquire arch_atomic_cmpxchg_acquire
#define arch_atomic_cmpxchg arch_atomic_cmpxchg

#undef ATOMIC_OPS
#undef ATOMIC_OP

#else
#include <asm-generic/atomic.h>
#endif
6 changes: 0 additions & 6 deletions arch/hexagon/include/asm/atomic.h
@@ -36,12 +36,6 @@ static inline void arch_atomic_set(atomic_t *v, int new)
*/
#define arch_atomic_read(v) READ_ONCE((v)->counter)

#define arch_atomic_xchg(v, new) \
(arch_xchg(&((v)->counter), (new)))

#define arch_atomic_cmpxchg(v, old, new) \
(arch_cmpxchg(&((v)->counter), (old), (new)))

#define ATOMIC_OP(op) \
static inline void arch_atomic_##op(int i, atomic_t *v) \
{ \
7 changes: 0 additions & 7 deletions arch/ia64/include/asm/atomic.h
@@ -207,13 +207,6 @@ ATOMIC64_FETCH_OP(xor, ^)
#undef ATOMIC64_FETCH_OP
#undef ATOMIC64_OP

#define arch_atomic_cmpxchg(v, old, new) (arch_cmpxchg(&((v)->counter), old, new))
#define arch_atomic_xchg(v, new) (arch_xchg(&((v)->counter), new))

#define arch_atomic64_cmpxchg(v, old, new) \
(arch_cmpxchg(&((v)->counter), old, new))
#define arch_atomic64_xchg(v, new) (arch_xchg(&((v)->counter), new))

#define arch_atomic_add(i,v) (void)arch_atomic_add_return((i), (v))
#define arch_atomic_sub(i,v) (void)arch_atomic_sub_return((i), (v))

7 changes: 0 additions & 7 deletions arch/loongarch/include/asm/atomic.h
@@ -181,9 +181,6 @@ static inline int arch_atomic_sub_if_positive(int i, atomic_t *v)
return result;
}

#define arch_atomic_cmpxchg(v, o, n) (arch_cmpxchg(&((v)->counter), (o), (n)))
#define arch_atomic_xchg(v, new) (arch_xchg(&((v)->counter), (new)))

/*
* arch_atomic_dec_if_positive - decrement by 1 if old value positive
* @v: pointer of type atomic_t
@@ -342,10 +339,6 @@ static inline long arch_atomic64_sub_if_positive(long i, atomic64_t *v)
return result;
}

#define arch_atomic64_cmpxchg(v, o, n) \
((__typeof__((v)->counter))arch_cmpxchg(&((v)->counter), (o), (n)))
#define arch_atomic64_xchg(v, new) (arch_xchg(&((v)->counter), (new)))

/*
* arch_atomic64_dec_if_positive - decrement by 1 if old value positive
* @v: pointer of type atomic64_t
9 changes: 3 additions & 6 deletions arch/m68k/include/asm/atomic.h
@@ -158,12 +158,7 @@ static inline int arch_atomic_inc_and_test(atomic_t *v)
}
#define arch_atomic_inc_and_test arch_atomic_inc_and_test

#ifdef CONFIG_RMW_INSNS

#define arch_atomic_cmpxchg(v, o, n) ((int)arch_cmpxchg(&((v)->counter), (o), (n)))
#define arch_atomic_xchg(v, new) (arch_xchg(&((v)->counter), new))

#else /* !CONFIG_RMW_INSNS */
#ifndef CONFIG_RMW_INSNS

static inline int arch_atomic_cmpxchg(atomic_t *v, int old, int new)
{
@@ -177,6 +172,7 @@ static inline int arch_atomic_cmpxchg(atomic_t *v, int old, int new)
local_irq_restore(flags);
return prev;
}
#define arch_atomic_cmpxchg arch_atomic_cmpxchg

static inline int arch_atomic_xchg(atomic_t *v, int new)
{
@@ -189,6 +185,7 @@ static inline int arch_atomic_xchg(atomic_t *v, int new)
local_irq_restore(flags);
return prev;
}
#define arch_atomic_xchg arch_atomic_xchg

#endif /* !CONFIG_RMW_INSNS */

11 changes: 0 additions & 11 deletions arch/mips/include/asm/atomic.h
@@ -33,17 +33,6 @@ static __always_inline void arch_##pfx##_set(pfx##_t *v, type i) \
{ \
WRITE_ONCE(v->counter, i); \
} \
\
static __always_inline type \
arch_##pfx##_cmpxchg(pfx##_t *v, type o, type n) \
{ \
return arch_cmpxchg(&v->counter, o, n); \
} \
\
static __always_inline type arch_##pfx##_xchg(pfx##_t *v, type n) \
{ \
return arch_xchg(&v->counter, n); \
}

ATOMIC_OPS(atomic, int)

3 changes: 0 additions & 3 deletions arch/openrisc/include/asm/atomic.h
@@ -130,7 +130,4 @@ static inline int arch_atomic_fetch_add_unless(atomic_t *v, int a, int u)

#include <asm/cmpxchg.h>

#define arch_atomic_xchg(ptr, v) (arch_xchg(&(ptr)->counter, (v)))
#define arch_atomic_cmpxchg(v, old, new) (arch_cmpxchg(&((v)->counter), (old), (new)))

#endif /* __ASM_OPENRISC_ATOMIC_H */
9 changes: 0 additions & 9 deletions arch/parisc/include/asm/atomic.h
@@ -73,10 +73,6 @@ static __inline__ int arch_atomic_read(const atomic_t *v)
return READ_ONCE((v)->counter);
}

/* exported interface */
#define arch_atomic_cmpxchg(v, o, n) (arch_cmpxchg(&((v)->counter), (o), (n)))
#define arch_atomic_xchg(v, new) (arch_xchg(&((v)->counter), new))

#define ATOMIC_OP(op, c_op) \
static __inline__ void arch_atomic_##op(int i, atomic_t *v) \
{ \
@@ -218,11 +214,6 @@ arch_atomic64_read(const atomic64_t *v)
return READ_ONCE((v)->counter);
}

/* exported interface */
#define arch_atomic64_cmpxchg(v, o, n) \
((__typeof__((v)->counter))arch_cmpxchg(&((v)->counter), (o), (n)))
#define arch_atomic64_xchg(v, new) (arch_xchg(&((v)->counter), new))

#endif /* !CONFIG_64BIT */


24 changes: 0 additions & 24 deletions arch/powerpc/include/asm/atomic.h
@@ -126,18 +126,6 @@ ATOMIC_OPS(xor, xor, "", K)
#undef ATOMIC_OP_RETURN_RELAXED
#undef ATOMIC_OP

#define arch_atomic_cmpxchg(v, o, n) \
(arch_cmpxchg(&((v)->counter), (o), (n)))
#define arch_atomic_cmpxchg_relaxed(v, o, n) \
arch_cmpxchg_relaxed(&((v)->counter), (o), (n))
#define arch_atomic_cmpxchg_acquire(v, o, n) \
arch_cmpxchg_acquire(&((v)->counter), (o), (n))

#define arch_atomic_xchg(v, new) \
(arch_xchg(&((v)->counter), new))
#define arch_atomic_xchg_relaxed(v, new) \
arch_xchg_relaxed(&((v)->counter), (new))

/**
* atomic_fetch_add_unless - add unless the number is a given value
* @v: pointer of type atomic_t
@@ -396,18 +384,6 @@ static __inline__ s64 arch_atomic64_dec_if_positive(atomic64_t *v)
}
#define arch_atomic64_dec_if_positive arch_atomic64_dec_if_positive

#define arch_atomic64_cmpxchg(v, o, n) \
(arch_cmpxchg(&((v)->counter), (o), (n)))
#define arch_atomic64_cmpxchg_relaxed(v, o, n) \
arch_cmpxchg_relaxed(&((v)->counter), (o), (n))
#define arch_atomic64_cmpxchg_acquire(v, o, n) \
arch_cmpxchg_acquire(&((v)->counter), (o), (n))

#define arch_atomic64_xchg(v, new) \
(arch_xchg(&((v)->counter), new))
#define arch_atomic64_xchg_relaxed(v, new) \
arch_xchg_relaxed(&((v)->counter), (new))

/**
* atomic64_fetch_add_unless - add unless the number is a given value
* @v: pointer of type atomic64_t
(Diffs for the remaining changed files are not shown here.)
