Skip to content

Commit

Permalink
Merge branch 'locking/rcuref' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip

Pulling rcurefs from Peter for tglx's work.

Link: https://lore.kernel.org/all/20230328084534.GE4253@hirez.programming.kicks-ass.net/
Signed-off-by: Jakub Kicinski <kuba@kernel.org>
  • Loading branch information
Jakub Kicinski committed Mar 29, 2023
2 parents 163c2c7 + ee1ee6d commit b133fff
Show file tree
Hide file tree
Showing 9 changed files with 752 additions and 19 deletions.
208 changes: 199 additions & 9 deletions include/linux/atomic/atomic-arch-fallback.h
Original file line number Diff line number Diff line change
Expand Up @@ -1208,15 +1208,21 @@ arch_atomic_inc_and_test(atomic_t *v)
#define arch_atomic_inc_and_test arch_atomic_inc_and_test
#endif

#ifndef arch_atomic_add_negative_relaxed
#ifdef arch_atomic_add_negative
#define arch_atomic_add_negative_acquire arch_atomic_add_negative
#define arch_atomic_add_negative_release arch_atomic_add_negative
#define arch_atomic_add_negative_relaxed arch_atomic_add_negative
#endif /* arch_atomic_add_negative */

#ifndef arch_atomic_add_negative
/**
* arch_atomic_add_negative - add and test if negative
* arch_atomic_add_negative - Add and test if negative
* @i: integer value to add
* @v: pointer of type atomic_t
*
* Atomically adds @i to @v and returns true
* if the result is negative, or false when
* result is greater than or equal to zero.
* Atomically adds @i to @v and returns true if the result is negative,
* or false when the result is greater than or equal to zero.
*/
static __always_inline bool
arch_atomic_add_negative(int i, atomic_t *v)
Expand All @@ -1226,6 +1232,95 @@ arch_atomic_add_negative(int i, atomic_t *v)
#define arch_atomic_add_negative arch_atomic_add_negative
#endif

#ifndef arch_atomic_add_negative_acquire
/**
 * arch_atomic_add_negative_acquire - Add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true if the result is negative,
 * or false when the result is greater than or equal to zero.
 *
 * Fallback: built on arch_atomic_add_return_acquire(), so it carries
 * that primitive's acquire ordering.
 */
static __always_inline bool
arch_atomic_add_negative_acquire(int i, atomic_t *v)
{
	return arch_atomic_add_return_acquire(i, v) < 0;
}
#define arch_atomic_add_negative_acquire arch_atomic_add_negative_acquire
#endif

#ifndef arch_atomic_add_negative_release
/**
 * arch_atomic_add_negative_release - Add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true if the result is negative,
 * or false when the result is greater than or equal to zero.
 *
 * Fallback: built on arch_atomic_add_return_release(), so it carries
 * that primitive's release ordering.
 */
static __always_inline bool
arch_atomic_add_negative_release(int i, atomic_t *v)
{
	return arch_atomic_add_return_release(i, v) < 0;
}
#define arch_atomic_add_negative_release arch_atomic_add_negative_release
#endif

#ifndef arch_atomic_add_negative_relaxed
/**
 * arch_atomic_add_negative_relaxed - Add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true if the result is negative,
 * or false when the result is greater than or equal to zero.
 *
 * Fallback: built on arch_atomic_add_return_relaxed(); no ordering
 * guarantees beyond the atomicity of the RMW itself.
 */
static __always_inline bool
arch_atomic_add_negative_relaxed(int i, atomic_t *v)
{
	return arch_atomic_add_return_relaxed(i, v) < 0;
}
#define arch_atomic_add_negative_relaxed arch_atomic_add_negative_relaxed
#endif

#else /* arch_atomic_add_negative_relaxed */

#ifndef arch_atomic_add_negative_acquire
/*
 * Acquire variant composed from the arch-provided relaxed primitive:
 * the acquire fence is issued after the relaxed RMW has completed.
 */
static __always_inline bool
arch_atomic_add_negative_acquire(int i, atomic_t *v)
{
	bool ret = arch_atomic_add_negative_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_add_negative_acquire arch_atomic_add_negative_acquire
#endif

#ifndef arch_atomic_add_negative_release
/*
 * Release variant: the release fence is issued before the relaxed RMW.
 */
static __always_inline bool
arch_atomic_add_negative_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_add_negative_relaxed(i, v);
}
#define arch_atomic_add_negative_release arch_atomic_add_negative_release
#endif

#ifndef arch_atomic_add_negative
/*
 * Fully ordered variant: bracket the relaxed RMW with the pre/post
 * full fences.
 */
static __always_inline bool
arch_atomic_add_negative(int i, atomic_t *v)
{
	bool ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_add_negative_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_add_negative arch_atomic_add_negative
#endif

#endif /* arch_atomic_add_negative_relaxed */

#ifndef arch_atomic_fetch_add_unless
/**
* arch_atomic_fetch_add_unless - add unless the number is already a given value
Expand Down Expand Up @@ -2329,15 +2424,21 @@ arch_atomic64_inc_and_test(atomic64_t *v)
#define arch_atomic64_inc_and_test arch_atomic64_inc_and_test
#endif

#ifndef arch_atomic64_add_negative_relaxed
#ifdef arch_atomic64_add_negative
#define arch_atomic64_add_negative_acquire arch_atomic64_add_negative
#define arch_atomic64_add_negative_release arch_atomic64_add_negative
#define arch_atomic64_add_negative_relaxed arch_atomic64_add_negative
#endif /* arch_atomic64_add_negative */

#ifndef arch_atomic64_add_negative
/**
* arch_atomic64_add_negative - add and test if negative
* arch_atomic64_add_negative - Add and test if negative
* @i: integer value to add
* @v: pointer of type atomic64_t
*
* Atomically adds @i to @v and returns true
* if the result is negative, or false when
* result is greater than or equal to zero.
* Atomically adds @i to @v and returns true if the result is negative,
* or false when the result is greater than or equal to zero.
*/
static __always_inline bool
arch_atomic64_add_negative(s64 i, atomic64_t *v)
Expand All @@ -2347,6 +2448,95 @@ arch_atomic64_add_negative(s64 i, atomic64_t *v)
#define arch_atomic64_add_negative arch_atomic64_add_negative
#endif

#ifndef arch_atomic64_add_negative_acquire
/**
 * arch_atomic64_add_negative_acquire - Add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v and returns true if the result is negative,
 * or false when the result is greater than or equal to zero.
 *
 * Fallback: built on arch_atomic64_add_return_acquire(), so it carries
 * that primitive's acquire ordering.
 */
static __always_inline bool
arch_atomic64_add_negative_acquire(s64 i, atomic64_t *v)
{
	return arch_atomic64_add_return_acquire(i, v) < 0;
}
#define arch_atomic64_add_negative_acquire arch_atomic64_add_negative_acquire
#endif

#ifndef arch_atomic64_add_negative_release
/**
 * arch_atomic64_add_negative_release - Add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v and returns true if the result is negative,
 * or false when the result is greater than or equal to zero.
 *
 * Fallback: built on arch_atomic64_add_return_release(), so it carries
 * that primitive's release ordering.
 */
static __always_inline bool
arch_atomic64_add_negative_release(s64 i, atomic64_t *v)
{
	return arch_atomic64_add_return_release(i, v) < 0;
}
#define arch_atomic64_add_negative_release arch_atomic64_add_negative_release
#endif

#ifndef arch_atomic64_add_negative_relaxed
/**
 * arch_atomic64_add_negative_relaxed - Add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v and returns true if the result is negative,
 * or false when the result is greater than or equal to zero.
 *
 * Fallback: built on arch_atomic64_add_return_relaxed(); no ordering
 * guarantees beyond the atomicity of the RMW itself.
 */
static __always_inline bool
arch_atomic64_add_negative_relaxed(s64 i, atomic64_t *v)
{
	return arch_atomic64_add_return_relaxed(i, v) < 0;
}
#define arch_atomic64_add_negative_relaxed arch_atomic64_add_negative_relaxed
#endif

#else /* arch_atomic64_add_negative_relaxed */

#ifndef arch_atomic64_add_negative_acquire
/*
 * Acquire variant composed from the arch-provided relaxed primitive:
 * the acquire fence is issued after the relaxed RMW has completed.
 */
static __always_inline bool
arch_atomic64_add_negative_acquire(s64 i, atomic64_t *v)
{
	bool ret = arch_atomic64_add_negative_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic64_add_negative_acquire arch_atomic64_add_negative_acquire
#endif

#ifndef arch_atomic64_add_negative_release
/*
 * Release variant: the release fence is issued before the relaxed RMW.
 */
static __always_inline bool
arch_atomic64_add_negative_release(s64 i, atomic64_t *v)
{
	__atomic_release_fence();
	return arch_atomic64_add_negative_relaxed(i, v);
}
#define arch_atomic64_add_negative_release arch_atomic64_add_negative_release
#endif

#ifndef arch_atomic64_add_negative
/*
 * Fully ordered variant: bracket the relaxed RMW with the pre/post
 * full fences.
 */
static __always_inline bool
arch_atomic64_add_negative(s64 i, atomic64_t *v)
{
	bool ret;
	__atomic_pre_full_fence();
	ret = arch_atomic64_add_negative_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic64_add_negative arch_atomic64_add_negative
#endif

#endif /* arch_atomic64_add_negative_relaxed */

#ifndef arch_atomic64_fetch_add_unless
/**
* arch_atomic64_fetch_add_unless - add unless the number is already a given value
Expand Down Expand Up @@ -2456,4 +2646,4 @@ arch_atomic64_dec_if_positive(atomic64_t *v)
#endif

#endif /* _LINUX_ATOMIC_FALLBACK_H */
// b5e87bdd5ede61470c29f7a7e4de781af3770f09
// 00071fffa021cec66f6290d706d69c91df87bade
68 changes: 67 additions & 1 deletion include/linux/atomic/atomic-instrumented.h
Original file line number Diff line number Diff line change
Expand Up @@ -592,6 +592,28 @@ atomic_add_negative(int i, atomic_t *v)
return arch_atomic_add_negative(i, v);
}

/*
 * Instrumented wrapper: report the read-modify-write access on @v to the
 * sanitizers, then delegate to the arch_ implementation.
 */
static __always_inline bool
atomic_add_negative_acquire(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_add_negative_acquire(i, v);
}

/*
 * Release variant additionally calls kcsan_release() so KCSAN models the
 * release ordering of the operation.
 */
static __always_inline bool
atomic_add_negative_release(int i, atomic_t *v)
{
	kcsan_release();
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_add_negative_release(i, v);
}

/*
 * Relaxed variant: instrumentation only, no extra ordering annotation.
 */
static __always_inline bool
atomic_add_negative_relaxed(int i, atomic_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_add_negative_relaxed(i, v);
}

static __always_inline int
atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
Expand Down Expand Up @@ -1211,6 +1233,28 @@ atomic64_add_negative(s64 i, atomic64_t *v)
return arch_atomic64_add_negative(i, v);
}

/*
 * Instrumented wrapper: report the read-modify-write access on @v to the
 * sanitizers, then delegate to the arch_ implementation.
 */
static __always_inline bool
atomic64_add_negative_acquire(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_add_negative_acquire(i, v);
}

/*
 * Release variant additionally calls kcsan_release() so KCSAN models the
 * release ordering of the operation.
 */
static __always_inline bool
atomic64_add_negative_release(s64 i, atomic64_t *v)
{
	kcsan_release();
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_add_negative_release(i, v);
}

/*
 * Relaxed variant: instrumentation only, no extra ordering annotation.
 */
static __always_inline bool
atomic64_add_negative_relaxed(s64 i, atomic64_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic64_add_negative_relaxed(i, v);
}

static __always_inline s64
atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
{
Expand Down Expand Up @@ -1830,6 +1874,28 @@ atomic_long_add_negative(long i, atomic_long_t *v)
return arch_atomic_long_add_negative(i, v);
}

/*
 * Instrumented wrapper: report the read-modify-write access on @v to the
 * sanitizers, then delegate to the arch_ implementation.
 */
static __always_inline bool
atomic_long_add_negative_acquire(long i, atomic_long_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_long_add_negative_acquire(i, v);
}

/*
 * Release variant additionally calls kcsan_release() so KCSAN models the
 * release ordering of the operation.
 */
static __always_inline bool
atomic_long_add_negative_release(long i, atomic_long_t *v)
{
	kcsan_release();
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_long_add_negative_release(i, v);
}

/*
 * Relaxed variant: instrumentation only, no extra ordering annotation.
 */
static __always_inline bool
atomic_long_add_negative_relaxed(long i, atomic_long_t *v)
{
	instrument_atomic_read_write(v, sizeof(*v));
	return arch_atomic_long_add_negative_relaxed(i, v);
}

static __always_inline long
atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
{
Expand Down Expand Up @@ -2083,4 +2149,4 @@ atomic_long_dec_if_positive(atomic_long_t *v)
})

#endif /* _LINUX_ATOMIC_INSTRUMENTED_H */
// 764f741eb77a7ad565dc8d99ce2837d5542e8aee
// 1b485de9cbaa4900de59e14ee2084357eaeb1c3a
38 changes: 37 additions & 1 deletion include/linux/atomic/atomic-long.h
Original file line number Diff line number Diff line change
Expand Up @@ -479,6 +479,24 @@ arch_atomic_long_add_negative(long i, atomic_long_t *v)
return arch_atomic64_add_negative(i, v);
}

/*
 * atomic_long is implemented on top of atomic64 in this branch of the
 * header; each wrapper just forwards to the s64 primitive.
 * NOTE(review): the enclosing #ifdef (presumably CONFIG_64BIT, per the
 * trailing #endif comment) is outside this hunk -- confirm.
 */
static __always_inline bool
arch_atomic_long_add_negative_acquire(long i, atomic_long_t *v)
{
	return arch_atomic64_add_negative_acquire(i, v);
}

static __always_inline bool
arch_atomic_long_add_negative_release(long i, atomic_long_t *v)
{
	return arch_atomic64_add_negative_release(i, v);
}

static __always_inline bool
arch_atomic_long_add_negative_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic64_add_negative_relaxed(i, v);
}

static __always_inline long
arch_atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
{
Expand Down Expand Up @@ -973,6 +991,24 @@ arch_atomic_long_add_negative(long i, atomic_long_t *v)
return arch_atomic_add_negative(i, v);
}

/*
 * atomic_long is implemented on top of the 32-bit atomic type in this
 * branch of the header; each wrapper just forwards to the int primitive.
 */
static __always_inline bool
arch_atomic_long_add_negative_acquire(long i, atomic_long_t *v)
{
	return arch_atomic_add_negative_acquire(i, v);
}

static __always_inline bool
arch_atomic_long_add_negative_release(long i, atomic_long_t *v)
{
	return arch_atomic_add_negative_release(i, v);
}

static __always_inline bool
arch_atomic_long_add_negative_relaxed(long i, atomic_long_t *v)
{
	return arch_atomic_add_negative_relaxed(i, v);
}

static __always_inline long
arch_atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
{
Expand Down Expand Up @@ -1011,4 +1047,4 @@ arch_atomic_long_dec_if_positive(atomic_long_t *v)

#endif /* CONFIG_64BIT */
#endif /* _LINUX_ATOMIC_LONG_H */
// e8f0e08ff072b74d180eabe2ad001282b38c2c88
// a194c07d7d2f4b0e178d3c118c919775d5d65f50
Loading

0 comments on commit b133fff

Please sign in to comment.