locking/atomic: remove fallback comments
Currently, a subset of the fallback templates has kerneldoc comments. This
results in a haphazard set of generated kerneldoc comments, since only some
operations have fallback templates to begin with.

We'd like to generate more consistent kerneldoc comments, and to do so
we'll need to restructure the way the fallback code is generated.

To minimize churn and to make it easier to restructure the fallback
code, this patch removes the existing kerneldoc comments from the
fallback templates. We can add new kerneldoc comments in subsequent
patches.

There should be no functional change as a result of this patch.

Signed-off-by: Mark Rutland <mark.rutland@arm.com>
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Reviewed-by: Kees Cook <keescook@chromium.org>
Link: https://lore.kernel.org/r/20230605070124.3741859-3-mark.rutland@arm.com
Mark Rutland authored and Peter Zijlstra committed Jun 5, 2023
1 parent dda5f31 commit 14d72d4
Showing 8 changed files with 1 addition and 223 deletions.
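
Each file under scripts/atomic/fallbacks/ is a shell template that is expanded into include/linux/atomic/atomic-arch-fallback.h, so deleting a kerneldoc block from a template also deletes it from the generated header. As a rough sketch, a generated fallback after this patch reduces to the guarded function alone; the body below is collapsed in the diff and is assumed to follow the usual pattern in terms of arch_atomic_sub_return(), so treat it as illustrative:

#ifndef arch_atomic_sub_and_test
/* No kerneldoc is emitted here any more; only the fallback definition remains. */
static __always_inline bool
arch_atomic_sub_and_test(int i, atomic_t *v)
{
	/* Assumed fallback body: test whether the subtraction hit zero. */
	return arch_atomic_sub_return(i, v) == 0;
}
#endif

The atomic64_t variants are generated from the same templates and differ only in the s64 operand type and the arch_atomic64_ prefix.
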
166 changes: 1 addition & 165 deletions include/linux/atomic/atomic-arch-fallback.h
@@ -1272,15 +1272,6 @@ arch_atomic_try_cmpxchg(atomic_t *v, int *old, int new)
#endif /* arch_atomic_try_cmpxchg_relaxed */

#ifndef arch_atomic_sub_and_test
/**
* arch_atomic_sub_and_test - subtract value from variable and test result
* @i: integer value to subtract
* @v: pointer of type atomic_t
*
* Atomically subtracts @i from @v and returns
* true if the result is zero, or false for all
* other cases.
*/
static __always_inline bool
arch_atomic_sub_and_test(int i, atomic_t *v)
{
@@ -1290,14 +1281,6 @@ arch_atomic_sub_and_test(int i, atomic_t *v)
#endif

#ifndef arch_atomic_dec_and_test
/**
* arch_atomic_dec_and_test - decrement and test
* @v: pointer of type atomic_t
*
* Atomically decrements @v by 1 and
* returns true if the result is 0, or false for all other
* cases.
*/
static __always_inline bool
arch_atomic_dec_and_test(atomic_t *v)
{
@@ -1307,14 +1290,6 @@ arch_atomic_dec_and_test(atomic_t *v)
#endif

#ifndef arch_atomic_inc_and_test
/**
* arch_atomic_inc_and_test - increment and test
* @v: pointer of type atomic_t
*
* Atomically increments @v by 1
* and returns true if the result is zero, or false for all
* other cases.
*/
static __always_inline bool
arch_atomic_inc_and_test(atomic_t *v)
{
@@ -1331,14 +1306,6 @@ arch_atomic_inc_and_test(atomic_t *v)
#endif /* arch_atomic_add_negative */

#ifndef arch_atomic_add_negative
/**
* arch_atomic_add_negative - Add and test if negative
* @i: integer value to add
* @v: pointer of type atomic_t
*
* Atomically adds @i to @v and returns true if the result is negative,
* or false when the result is greater than or equal to zero.
*/
static __always_inline bool
arch_atomic_add_negative(int i, atomic_t *v)
{
@@ -1348,14 +1315,6 @@ arch_atomic_add_negative(int i, atomic_t *v)
#endif

#ifndef arch_atomic_add_negative_acquire
/**
* arch_atomic_add_negative_acquire - Add and test if negative
* @i: integer value to add
* @v: pointer of type atomic_t
*
* Atomically adds @i to @v and returns true if the result is negative,
* or false when the result is greater than or equal to zero.
*/
static __always_inline bool
arch_atomic_add_negative_acquire(int i, atomic_t *v)
{
@@ -1365,14 +1324,6 @@ arch_atomic_add_negative_acquire(int i, atomic_t *v)
#endif

#ifndef arch_atomic_add_negative_release
/**
* arch_atomic_add_negative_release - Add and test if negative
* @i: integer value to add
* @v: pointer of type atomic_t
*
* Atomically adds @i to @v and returns true if the result is negative,
* or false when the result is greater than or equal to zero.
*/
static __always_inline bool
arch_atomic_add_negative_release(int i, atomic_t *v)
{
@@ -1382,14 +1333,6 @@ arch_atomic_add_negative_release(int i, atomic_t *v)
#endif

#ifndef arch_atomic_add_negative_relaxed
/**
* arch_atomic_add_negative_relaxed - Add and test if negative
* @i: integer value to add
* @v: pointer of type atomic_t
*
* Atomically adds @i to @v and returns true if the result is negative,
* or false when the result is greater than or equal to zero.
*/
static __always_inline bool
arch_atomic_add_negative_relaxed(int i, atomic_t *v)
{
@@ -1437,15 +1380,6 @@ arch_atomic_add_negative(int i, atomic_t *v)
#endif /* arch_atomic_add_negative_relaxed */

#ifndef arch_atomic_fetch_add_unless
/**
* arch_atomic_fetch_add_unless - add unless the number is already a given value
* @v: pointer of type atomic_t
* @a: the amount to add to v...
* @u: ...unless v is equal to u.
*
* Atomically adds @a to @v, so long as @v was not already @u.
* Returns original value of @v
*/
static __always_inline int
arch_atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
@@ -1462,15 +1396,6 @@ arch_atomic_fetch_add_unless(atomic_t *v, int a, int u)
#endif

#ifndef arch_atomic_add_unless
/**
* arch_atomic_add_unless - add unless the number is already a given value
* @v: pointer of type atomic_t
* @a: the amount to add to v...
* @u: ...unless v is equal to u.
*
* Atomically adds @a to @v, if @v was not already @u.
* Returns true if the addition was done.
*/
static __always_inline bool
arch_atomic_add_unless(atomic_t *v, int a, int u)
{
@@ -1480,13 +1405,6 @@ arch_atomic_add_unless(atomic_t *v, int a, int u)
#endif

#ifndef arch_atomic_inc_not_zero
/**
* arch_atomic_inc_not_zero - increment unless the number is zero
* @v: pointer of type atomic_t
*
* Atomically increments @v by 1, if @v is non-zero.
* Returns true if the increment was done.
*/
static __always_inline bool
arch_atomic_inc_not_zero(atomic_t *v)
{
@@ -2488,15 +2406,6 @@ arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
#endif /* arch_atomic64_try_cmpxchg_relaxed */

#ifndef arch_atomic64_sub_and_test
/**
* arch_atomic64_sub_and_test - subtract value from variable and test result
* @i: integer value to subtract
* @v: pointer of type atomic64_t
*
* Atomically subtracts @i from @v and returns
* true if the result is zero, or false for all
* other cases.
*/
static __always_inline bool
arch_atomic64_sub_and_test(s64 i, atomic64_t *v)
{
@@ -2506,14 +2415,6 @@ arch_atomic64_sub_and_test(s64 i, atomic64_t *v)
#endif

#ifndef arch_atomic64_dec_and_test
/**
* arch_atomic64_dec_and_test - decrement and test
* @v: pointer of type atomic64_t
*
* Atomically decrements @v by 1 and
* returns true if the result is 0, or false for all other
* cases.
*/
static __always_inline bool
arch_atomic64_dec_and_test(atomic64_t *v)
{
@@ -2523,14 +2424,6 @@ arch_atomic64_dec_and_test(atomic64_t *v)
#endif

#ifndef arch_atomic64_inc_and_test
/**
* arch_atomic64_inc_and_test - increment and test
* @v: pointer of type atomic64_t
*
* Atomically increments @v by 1
* and returns true if the result is zero, or false for all
* other cases.
*/
static __always_inline bool
arch_atomic64_inc_and_test(atomic64_t *v)
{
@@ -2547,14 +2440,6 @@ arch_atomic64_inc_and_test(atomic64_t *v)
#endif /* arch_atomic64_add_negative */

#ifndef arch_atomic64_add_negative
/**
* arch_atomic64_add_negative - Add and test if negative
* @i: integer value to add
* @v: pointer of type atomic64_t
*
* Atomically adds @i to @v and returns true if the result is negative,
* or false when the result is greater than or equal to zero.
*/
static __always_inline bool
arch_atomic64_add_negative(s64 i, atomic64_t *v)
{
@@ -2564,14 +2449,6 @@ arch_atomic64_add_negative(s64 i, atomic64_t *v)
#endif

#ifndef arch_atomic64_add_negative_acquire
/**
* arch_atomic64_add_negative_acquire - Add and test if negative
* @i: integer value to add
* @v: pointer of type atomic64_t
*
* Atomically adds @i to @v and returns true if the result is negative,
* or false when the result is greater than or equal to zero.
*/
static __always_inline bool
arch_atomic64_add_negative_acquire(s64 i, atomic64_t *v)
{
@@ -2581,14 +2458,6 @@ arch_atomic64_add_negative_acquire(s64 i, atomic64_t *v)
#endif

#ifndef arch_atomic64_add_negative_release
/**
* arch_atomic64_add_negative_release - Add and test if negative
* @i: integer value to add
* @v: pointer of type atomic64_t
*
* Atomically adds @i to @v and returns true if the result is negative,
* or false when the result is greater than or equal to zero.
*/
static __always_inline bool
arch_atomic64_add_negative_release(s64 i, atomic64_t *v)
{
@@ -2598,14 +2467,6 @@ arch_atomic64_add_negative_release(s64 i, atomic64_t *v)
#endif

#ifndef arch_atomic64_add_negative_relaxed
/**
* arch_atomic64_add_negative_relaxed - Add and test if negative
* @i: integer value to add
* @v: pointer of type atomic64_t
*
* Atomically adds @i to @v and returns true if the result is negative,
* or false when the result is greater than or equal to zero.
*/
static __always_inline bool
arch_atomic64_add_negative_relaxed(s64 i, atomic64_t *v)
{
@@ -2653,15 +2514,6 @@ arch_atomic64_add_negative(s64 i, atomic64_t *v)
#endif /* arch_atomic64_add_negative_relaxed */

#ifndef arch_atomic64_fetch_add_unless
/**
* arch_atomic64_fetch_add_unless - add unless the number is already a given value
* @v: pointer of type atomic64_t
* @a: the amount to add to v...
* @u: ...unless v is equal to u.
*
* Atomically adds @a to @v, so long as @v was not already @u.
* Returns original value of @v
*/
static __always_inline s64
arch_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
{
@@ -2678,15 +2530,6 @@ arch_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
#endif

#ifndef arch_atomic64_add_unless
/**
* arch_atomic64_add_unless - add unless the number is already a given value
* @v: pointer of type atomic64_t
* @a: the amount to add to v...
* @u: ...unless v is equal to u.
*
* Atomically adds @a to @v, if @v was not already @u.
* Returns true if the addition was done.
*/
static __always_inline bool
arch_atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
{
@@ -2696,13 +2539,6 @@ arch_atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
#endif

#ifndef arch_atomic64_inc_not_zero
/**
* arch_atomic64_inc_not_zero - increment unless the number is zero
* @v: pointer of type atomic64_t
*
* Atomically increments @v by 1, if @v is non-zero.
* Returns true if the increment was done.
*/
static __always_inline bool
arch_atomic64_inc_not_zero(atomic64_t *v)
{
@@ -2761,4 +2597,4 @@ arch_atomic64_dec_if_positive(atomic64_t *v)
#endif

#endif /* _LINUX_ATOMIC_FALLBACK_H */
// 52dfc6fe4a2e7234bbd2aa3e16a377c1db793a53
// 9f0fd6ed53267c6ec64e36cd18e6fd8df57ea277
8 changes: 0 additions & 8 deletions scripts/atomic/fallbacks/add_negative
@@ -1,12 +1,4 @@
cat <<EOF
/**
* arch_${atomic}_add_negative${order} - Add and test if negative
* @i: integer value to add
* @v: pointer of type ${atomic}_t
*
* Atomically adds @i to @v and returns true if the result is negative,
* or false when the result is greater than or equal to zero.
*/
static __always_inline bool
arch_${atomic}_add_negative${order}(${int} i, ${atomic}_t *v)
{
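
After this removal the template emits only the function for each ordering variant. A sketch of the plain variant as generated, assuming the usual fallback in terms of arch_atomic_add_return():

static __always_inline bool
arch_atomic_add_negative(int i, atomic_t *v)
{
	/* Assumed body: report whether the value went negative after the add. */
	return arch_atomic_add_return(i, v) < 0;
}
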
9 changes: 0 additions & 9 deletions scripts/atomic/fallbacks/add_unless
@@ -1,13 +1,4 @@
cat << EOF
/**
* arch_${atomic}_add_unless - add unless the number is already a given value
* @v: pointer of type ${atomic}_t
* @a: the amount to add to v...
* @u: ...unless v is equal to u.
*
* Atomically adds @a to @v, if @v was not already @u.
* Returns true if the addition was done.
*/
static __always_inline bool
arch_${atomic}_add_unless(${atomic}_t *v, ${int} a, ${int} u)
{
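
A sketch of what this template is assumed to expand to, with add_unless built on top of fetch_add_unless:

static __always_inline bool
arch_atomic_add_unless(atomic_t *v, int a, int u)
{
	/* Assumed body: the add only happens when the old value was not u. */
	return arch_atomic_fetch_add_unless(v, a, u) != u;
}
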
8 changes: 0 additions & 8 deletions scripts/atomic/fallbacks/dec_and_test
@@ -1,12 +1,4 @@
cat <<EOF
/**
* arch_${atomic}_dec_and_test - decrement and test
* @v: pointer of type ${atomic}_t
*
* Atomically decrements @v by 1 and
* returns true if the result is 0, or false for all other
* cases.
*/
static __always_inline bool
arch_${atomic}_dec_and_test(${atomic}_t *v)
{
9 changes: 0 additions & 9 deletions scripts/atomic/fallbacks/fetch_add_unless
@@ -1,13 +1,4 @@
cat << EOF
/**
* arch_${atomic}_fetch_add_unless - add unless the number is already a given value
* @v: pointer of type ${atomic}_t
* @a: the amount to add to v...
* @u: ...unless v is equal to u.
*
* Atomically adds @a to @v, so long as @v was not already @u.
* Returns original value of @v
*/
static __always_inline ${int}
arch_${atomic}_fetch_add_unless(${atomic}_t *v, ${int} a, ${int} u)
{
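
fetch_add_unless is the one non-trivial fallback in this set; a sketch of the generated function, assuming the usual arch_atomic_try_cmpxchg() retry loop:

static __always_inline int
arch_atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	int c = arch_atomic_read(v);

	/* Retry the add until it succeeds, bailing out if the value is already u. */
	do {
		if (unlikely(c == u))
			break;
	} while (!arch_atomic_try_cmpxchg(v, &c, c + a));

	return c;
}
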
8 changes: 0 additions & 8 deletions scripts/atomic/fallbacks/inc_and_test
@@ -1,12 +1,4 @@
cat <<EOF
/**
* arch_${atomic}_inc_and_test - increment and test
* @v: pointer of type ${atomic}_t
*
* Atomically increments @v by 1
* and returns true if the result is zero, or false for all
* other cases.
*/
static __always_inline bool
arch_${atomic}_inc_and_test(${atomic}_t *v)
{
7 changes: 0 additions & 7 deletions scripts/atomic/fallbacks/inc_not_zero
@@ -1,11 +1,4 @@
cat <<EOF
/**
* arch_${atomic}_inc_not_zero - increment unless the number is zero
* @v: pointer of type ${atomic}_t
*
* Atomically increments @v by 1, if @v is non-zero.
* Returns true if the increment was done.
*/
static __always_inline bool
arch_${atomic}_inc_not_zero(${atomic}_t *v)
{
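
A sketch of the assumed expansion, with inc_not_zero expressed as add_unless with the constants 1 and 0:

static __always_inline bool
arch_atomic_inc_not_zero(atomic_t *v)
{
	/* Assumed body: increment only when the current value is non-zero. */
	return arch_atomic_add_unless(v, 1, 0);
}
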
