locking/atomic: Fix atomic64_relaxed() bits
We should only expand the atomic64 relaxed bits once we've included
all relevant headers. So move it down until after we potentially
include asm-generic/atomic64.h.

In practice this will not have made a difference so far, since the
generic bits will not define _relaxed versions.

Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Cc: Andrew Morton <akpm@linux-foundation.org>
Cc: Davidlohr Bueso <dave@stgolabs.net>
Cc: Frederic Weisbecker <fweisbec@gmail.com>
Cc: Linus Torvalds <torvalds@linux-foundation.org>
Cc: Paul E. McKenney <paulmck@linux.vnet.ibm.com>
Cc: Peter Zijlstra <peterz@infradead.org>
Cc: Thomas Gleixner <tglx@linutronix.de>
Cc: Will Deacon <will.deacon@arm.com>
Cc: linux-arch@vger.kernel.org
Cc: linux-kernel@vger.kernel.org
Signed-off-by: Ingo Molnar <mingo@kernel.org>
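For context, the atomic64_* blocks being moved construct their acquire, release and fully ordered variants from the corresponding _relaxed operation via generic wrapper macros defined near the top of this same header. A rough sketch of those wrappers as they looked around this time (details may differ between kernel versions):

/* Sketch of the generic ordering wrappers used by the moved block. */
#define __atomic_op_acquire(op, args...)				\
({									\
	typeof(op##_relaxed(args)) __ret = op##_relaxed(args);		\
	smp_mb__after_atomic();						\
	__ret;								\
})

#define __atomic_op_release(op, args...)				\
({									\
	smp_mb__before_atomic();					\
	op##_relaxed(args);						\
})

#define __atomic_op_fence(op, args...)					\
({									\
	typeof(op##_relaxed(args)) __ret;				\
	smp_mb__before_atomic();					\
	__ret = op##_relaxed(args);					\
	smp_mb__after_atomic();						\
	__ret;								\
})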
Peter Zijlstra authored and Ingo Molnar committed Jun 16, 2016
1 parent 6dc2587 commit e121333
Showing 1 changed file with 153 additions and 153 deletions.
include/linux/atomic.h — 306 changes: 153 additions & 153 deletions
@@ -211,159 +211,6 @@
#endif
#endif /* atomic_cmpxchg_relaxed */

#ifndef atomic64_read_acquire
#define atomic64_read_acquire(v) smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic64_set_release
#define atomic64_set_release(v, i) smp_store_release(&(v)->counter, (i))
#endif

/* atomic64_add_return_relaxed */
#ifndef atomic64_add_return_relaxed
#define atomic64_add_return_relaxed atomic64_add_return
#define atomic64_add_return_acquire atomic64_add_return
#define atomic64_add_return_release atomic64_add_return

#else /* atomic64_add_return_relaxed */

#ifndef atomic64_add_return_acquire
#define atomic64_add_return_acquire(...) \
__atomic_op_acquire(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return_release
#define atomic64_add_return_release(...) \
__atomic_op_release(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return
#define atomic64_add_return(...) \
__atomic_op_fence(atomic64_add_return, __VA_ARGS__)
#endif
#endif /* atomic64_add_return_relaxed */

/* atomic64_inc_return_relaxed */
#ifndef atomic64_inc_return_relaxed
#define atomic64_inc_return_relaxed atomic64_inc_return
#define atomic64_inc_return_acquire atomic64_inc_return
#define atomic64_inc_return_release atomic64_inc_return

#else /* atomic64_inc_return_relaxed */

#ifndef atomic64_inc_return_acquire
#define atomic64_inc_return_acquire(...) \
__atomic_op_acquire(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return_release
#define atomic64_inc_return_release(...) \
__atomic_op_release(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return
#define atomic64_inc_return(...) \
__atomic_op_fence(atomic64_inc_return, __VA_ARGS__)
#endif
#endif /* atomic64_inc_return_relaxed */


/* atomic64_sub_return_relaxed */
#ifndef atomic64_sub_return_relaxed
#define atomic64_sub_return_relaxed atomic64_sub_return
#define atomic64_sub_return_acquire atomic64_sub_return
#define atomic64_sub_return_release atomic64_sub_return

#else /* atomic64_sub_return_relaxed */

#ifndef atomic64_sub_return_acquire
#define atomic64_sub_return_acquire(...) \
__atomic_op_acquire(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return_release
#define atomic64_sub_return_release(...) \
__atomic_op_release(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return
#define atomic64_sub_return(...) \
__atomic_op_fence(atomic64_sub_return, __VA_ARGS__)
#endif
#endif /* atomic64_sub_return_relaxed */

/* atomic64_dec_return_relaxed */
#ifndef atomic64_dec_return_relaxed
#define atomic64_dec_return_relaxed atomic64_dec_return
#define atomic64_dec_return_acquire atomic64_dec_return
#define atomic64_dec_return_release atomic64_dec_return

#else /* atomic64_dec_return_relaxed */

#ifndef atomic64_dec_return_acquire
#define atomic64_dec_return_acquire(...) \
__atomic_op_acquire(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return_release
#define atomic64_dec_return_release(...) \
__atomic_op_release(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return
#define atomic64_dec_return(...) \
__atomic_op_fence(atomic64_dec_return, __VA_ARGS__)
#endif
#endif /* atomic64_dec_return_relaxed */

/* atomic64_xchg_relaxed */
#ifndef atomic64_xchg_relaxed
#define atomic64_xchg_relaxed atomic64_xchg
#define atomic64_xchg_acquire atomic64_xchg
#define atomic64_xchg_release atomic64_xchg

#else /* atomic64_xchg_relaxed */

#ifndef atomic64_xchg_acquire
#define atomic64_xchg_acquire(...) \
__atomic_op_acquire(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg_release
#define atomic64_xchg_release(...) \
__atomic_op_release(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg
#define atomic64_xchg(...) \
__atomic_op_fence(atomic64_xchg, __VA_ARGS__)
#endif
#endif /* atomic64_xchg_relaxed */

/* atomic64_cmpxchg_relaxed */
#ifndef atomic64_cmpxchg_relaxed
#define atomic64_cmpxchg_relaxed atomic64_cmpxchg
#define atomic64_cmpxchg_acquire atomic64_cmpxchg
#define atomic64_cmpxchg_release atomic64_cmpxchg

#else /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_cmpxchg_acquire
#define atomic64_cmpxchg_acquire(...) \
__atomic_op_acquire(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg_release
#define atomic64_cmpxchg_release(...) \
__atomic_op_release(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg
#define atomic64_cmpxchg(...) \
__atomic_op_fence(atomic64_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic64_cmpxchg_relaxed */

/* cmpxchg_relaxed */
#ifndef cmpxchg_relaxed
#define cmpxchg_relaxed cmpxchg
@@ -583,6 +430,159 @@ static inline int atomic_fetch_or(int mask, atomic_t *p)
#include <asm-generic/atomic64.h>
#endif

#ifndef atomic64_read_acquire
#define atomic64_read_acquire(v) smp_load_acquire(&(v)->counter)
#endif

#ifndef atomic64_set_release
#define atomic64_set_release(v, i) smp_store_release(&(v)->counter, (i))
#endif

/* atomic64_add_return_relaxed */
#ifndef atomic64_add_return_relaxed
#define atomic64_add_return_relaxed atomic64_add_return
#define atomic64_add_return_acquire atomic64_add_return
#define atomic64_add_return_release atomic64_add_return

#else /* atomic64_add_return_relaxed */

#ifndef atomic64_add_return_acquire
#define atomic64_add_return_acquire(...) \
__atomic_op_acquire(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return_release
#define atomic64_add_return_release(...) \
__atomic_op_release(atomic64_add_return, __VA_ARGS__)
#endif

#ifndef atomic64_add_return
#define atomic64_add_return(...) \
__atomic_op_fence(atomic64_add_return, __VA_ARGS__)
#endif
#endif /* atomic64_add_return_relaxed */

/* atomic64_inc_return_relaxed */
#ifndef atomic64_inc_return_relaxed
#define atomic64_inc_return_relaxed atomic64_inc_return
#define atomic64_inc_return_acquire atomic64_inc_return
#define atomic64_inc_return_release atomic64_inc_return

#else /* atomic64_inc_return_relaxed */

#ifndef atomic64_inc_return_acquire
#define atomic64_inc_return_acquire(...) \
__atomic_op_acquire(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return_release
#define atomic64_inc_return_release(...) \
__atomic_op_release(atomic64_inc_return, __VA_ARGS__)
#endif

#ifndef atomic64_inc_return
#define atomic64_inc_return(...) \
__atomic_op_fence(atomic64_inc_return, __VA_ARGS__)
#endif
#endif /* atomic64_inc_return_relaxed */


/* atomic64_sub_return_relaxed */
#ifndef atomic64_sub_return_relaxed
#define atomic64_sub_return_relaxed atomic64_sub_return
#define atomic64_sub_return_acquire atomic64_sub_return
#define atomic64_sub_return_release atomic64_sub_return

#else /* atomic64_sub_return_relaxed */

#ifndef atomic64_sub_return_acquire
#define atomic64_sub_return_acquire(...) \
__atomic_op_acquire(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return_release
#define atomic64_sub_return_release(...) \
__atomic_op_release(atomic64_sub_return, __VA_ARGS__)
#endif

#ifndef atomic64_sub_return
#define atomic64_sub_return(...) \
__atomic_op_fence(atomic64_sub_return, __VA_ARGS__)
#endif
#endif /* atomic64_sub_return_relaxed */

/* atomic64_dec_return_relaxed */
#ifndef atomic64_dec_return_relaxed
#define atomic64_dec_return_relaxed atomic64_dec_return
#define atomic64_dec_return_acquire atomic64_dec_return
#define atomic64_dec_return_release atomic64_dec_return

#else /* atomic64_dec_return_relaxed */

#ifndef atomic64_dec_return_acquire
#define atomic64_dec_return_acquire(...) \
__atomic_op_acquire(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return_release
#define atomic64_dec_return_release(...) \
__atomic_op_release(atomic64_dec_return, __VA_ARGS__)
#endif

#ifndef atomic64_dec_return
#define atomic64_dec_return(...) \
__atomic_op_fence(atomic64_dec_return, __VA_ARGS__)
#endif
#endif /* atomic64_dec_return_relaxed */

/* atomic64_xchg_relaxed */
#ifndef atomic64_xchg_relaxed
#define atomic64_xchg_relaxed atomic64_xchg
#define atomic64_xchg_acquire atomic64_xchg
#define atomic64_xchg_release atomic64_xchg

#else /* atomic64_xchg_relaxed */

#ifndef atomic64_xchg_acquire
#define atomic64_xchg_acquire(...) \
__atomic_op_acquire(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg_release
#define atomic64_xchg_release(...) \
__atomic_op_release(atomic64_xchg, __VA_ARGS__)
#endif

#ifndef atomic64_xchg
#define atomic64_xchg(...) \
__atomic_op_fence(atomic64_xchg, __VA_ARGS__)
#endif
#endif /* atomic64_xchg_relaxed */

/* atomic64_cmpxchg_relaxed */
#ifndef atomic64_cmpxchg_relaxed
#define atomic64_cmpxchg_relaxed atomic64_cmpxchg
#define atomic64_cmpxchg_acquire atomic64_cmpxchg
#define atomic64_cmpxchg_release atomic64_cmpxchg

#else /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_cmpxchg_acquire
#define atomic64_cmpxchg_acquire(...) \
__atomic_op_acquire(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg_release
#define atomic64_cmpxchg_release(...) \
__atomic_op_release(atomic64_cmpxchg, __VA_ARGS__)
#endif

#ifndef atomic64_cmpxchg
#define atomic64_cmpxchg(...) \
__atomic_op_fence(atomic64_cmpxchg, __VA_ARGS__)
#endif
#endif /* atomic64_cmpxchg_relaxed */

#ifndef atomic64_andnot
static inline void atomic64_andnot(long long i, atomic64_t *v)
{
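Why the ordering matters in principle, even though the changelog notes it has made no practical difference yet: the fallback blocks key off #ifndef atomic64_*_relaxed, so they only pick the right branch if they are expanded after every header that could define a _relaxed variant, including asm-generic/atomic64.h. Below is a minimal stand-alone illustration of that hazard, using hypothetical names (foo, foo_relaxed, foo_acquire) rather than the real kernel interfaces:

#include <stdio.h>

static int counter;

/* Fully ordered and relaxed flavours of a hypothetical operation. */
static int foo(int i)         { return counter += i; }	/* imagine full barriers here */
static int foo_relaxed(int i) { return counter += i; }	/* no barriers */

/* A "generic" header included late would provide something like: */
#define foo_relaxed foo_relaxed

/*
 * Fallback block, analogous to the atomic64_*_relaxed blocks in the patch.
 * It is only correct if expanded AFTER the definition above; expanded too
 * early, the #ifndef branch would alias foo_acquire() to the fully ordered
 * foo() and the cheaper relaxed variant would be silently ignored.
 */
#ifndef foo_relaxed
#define foo_relaxed foo
#define foo_acquire foo
#else
#ifndef foo_acquire
#define foo_acquire(i) foo_relaxed(i)	/* plus an acquire barrier in real code */
#endif
#endif

int main(void)
{
	printf("%d\n", foo(1));		/* fully ordered path */
	printf("%d\n", foo_acquire(1));	/* resolves to the relaxed path here */
	return 0;
}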
