Commit d12157ef authored by Mark Rutland's avatar Mark Rutland Committed by Peter Zijlstra
Browse files

locking/atomic: make atomic*_{cmp,}xchg optional



Most architectures define the atomic/atomic64 xchg and cmpxchg
operations in terms of arch_xchg and arch_cmpxchg respectively.

Add fallbacks for these cases and remove the trivial cases from arch
code. On some architectures the existing definitions are kept as these
are used to build other arch_atomic*() operations.

Signed-off-by: default avatarMark Rutland <mark.rutland@arm.com>
Signed-off-by: default avatarPeter Zijlstra (Intel) <peterz@infradead.org>
Reviewed-by: default avatarKees Cook <keescook@chromium.org>
Link: https://lore.kernel.org/r/20230605070124.3741859-5-mark.rutland@arm.com
parent a7bafa79
Loading
Loading
Loading
Loading
+0 −10
Original line number Diff line number Diff line
@@ -200,16 +200,6 @@ ATOMIC_OPS(xor, xor)
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

#define arch_atomic64_cmpxchg(v, old, new) \
	(arch_cmpxchg(&((v)->counter), old, new))
#define arch_atomic64_xchg(v, new) \
	(arch_xchg(&((v)->counter), new))

#define arch_atomic_cmpxchg(v, old, new) \
	(arch_cmpxchg(&((v)->counter), old, new))
#define arch_atomic_xchg(v, new) \
	(arch_xchg(&((v)->counter), new))

/**
 * arch_atomic_fetch_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
+0 −24
Original line number Diff line number Diff line
@@ -22,30 +22,6 @@
#include <asm/atomic-spinlock.h>
#endif

#define arch_atomic_cmpxchg(v, o, n)					\
({									\
	arch_cmpxchg(&((v)->counter), (o), (n));			\
})

#ifdef arch_cmpxchg_relaxed
#define arch_atomic_cmpxchg_relaxed(v, o, n)				\
({									\
	arch_cmpxchg_relaxed(&((v)->counter), (o), (n));		\
})
#endif

#define arch_atomic_xchg(v, n)						\
({									\
	arch_xchg(&((v)->counter), (n));				\
})

#ifdef arch_xchg_relaxed
#define arch_atomic_xchg_relaxed(v, n)					\
({									\
	arch_xchg_relaxed(&((v)->counter), (n));			\
})
#endif

/*
 * 64-bit atomics
 */
+2 −0
Original line number Diff line number Diff line
@@ -159,6 +159,7 @@ arch_atomic64_cmpxchg(atomic64_t *ptr, s64 expected, s64 new)

	return prev;
}
#define arch_atomic64_cmpxchg arch_atomic64_cmpxchg

static inline s64 arch_atomic64_xchg(atomic64_t *ptr, s64 new)
{
@@ -179,6 +180,7 @@ static inline s64 arch_atomic64_xchg(atomic64_t *ptr, s64 new)

	return prev;
}
#define arch_atomic64_xchg arch_atomic64_xchg

/**
 * arch_atomic64_dec_if_positive - decrement by 1 if old value positive
+1 −2
Original line number Diff line number Diff line
@@ -210,6 +210,7 @@ static inline int arch_atomic_cmpxchg(atomic_t *v, int old, int new)

	return ret;
}
#define arch_atomic_cmpxchg arch_atomic_cmpxchg

#define arch_atomic_fetch_andnot		arch_atomic_fetch_andnot

@@ -240,8 +241,6 @@ ATOMIC_OPS(xor, ^=, eor)
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

#define arch_atomic_xchg(v, new) (arch_xchg(&((v)->counter), new))

#ifndef CONFIG_GENERIC_ATOMIC64
typedef struct {
	s64 counter;
+0 −28
Original line number Diff line number Diff line
@@ -142,24 +142,6 @@ static __always_inline long arch_atomic64_dec_if_positive(atomic64_t *v)
#define arch_atomic_fetch_xor_release		arch_atomic_fetch_xor_release
#define arch_atomic_fetch_xor			arch_atomic_fetch_xor

#define arch_atomic_xchg_relaxed(v, new) \
	arch_xchg_relaxed(&((v)->counter), (new))
#define arch_atomic_xchg_acquire(v, new) \
	arch_xchg_acquire(&((v)->counter), (new))
#define arch_atomic_xchg_release(v, new) \
	arch_xchg_release(&((v)->counter), (new))
#define arch_atomic_xchg(v, new) \
	arch_xchg(&((v)->counter), (new))

#define arch_atomic_cmpxchg_relaxed(v, old, new) \
	arch_cmpxchg_relaxed(&((v)->counter), (old), (new))
#define arch_atomic_cmpxchg_acquire(v, old, new) \
	arch_cmpxchg_acquire(&((v)->counter), (old), (new))
#define arch_atomic_cmpxchg_release(v, old, new) \
	arch_cmpxchg_release(&((v)->counter), (old), (new))
#define arch_atomic_cmpxchg(v, old, new) \
	arch_cmpxchg(&((v)->counter), (old), (new))

#define arch_atomic_andnot			arch_atomic_andnot

/*
@@ -209,16 +191,6 @@ static __always_inline long arch_atomic64_dec_if_positive(atomic64_t *v)
#define arch_atomic64_fetch_xor_release		arch_atomic64_fetch_xor_release
#define arch_atomic64_fetch_xor			arch_atomic64_fetch_xor

#define arch_atomic64_xchg_relaxed		arch_atomic_xchg_relaxed
#define arch_atomic64_xchg_acquire		arch_atomic_xchg_acquire
#define arch_atomic64_xchg_release		arch_atomic_xchg_release
#define arch_atomic64_xchg			arch_atomic_xchg

#define arch_atomic64_cmpxchg_relaxed		arch_atomic_cmpxchg_relaxed
#define arch_atomic64_cmpxchg_acquire		arch_atomic_cmpxchg_acquire
#define arch_atomic64_cmpxchg_release		arch_atomic_cmpxchg_release
#define arch_atomic64_cmpxchg			arch_atomic_cmpxchg

#define arch_atomic64_andnot			arch_atomic64_andnot

#define arch_atomic64_dec_if_positive		arch_atomic64_dec_if_positive
Loading