Commit b115d85a authored by Linus Torvalds
Browse files

Merge tag 'locking-core-2023-05-05' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip

Pull locking updates from Ingo Molnar:

 - Introduce local{,64}_try_cmpxchg() - a slightly more optimal
   primitive, which will be used in perf events ring-buffer code

 - Simplify/modify rwsems on PREEMPT_RT, to address writer starvation

 - Misc cleanups/fixes

* tag 'locking-core-2023-05-05' of git://git.kernel.org/pub/scm/linux/kernel/git/tip/tip:
  locking/atomic: Correct (cmp)xchg() instrumentation
  locking/x86: Define arch_try_cmpxchg_local()
  locking/arch: Wire up local_try_cmpxchg()
  locking/generic: Wire up local{,64}_try_cmpxchg()
  locking/atomic: Add generic try_cmpxchg{,64}_local() support
  locking/rwbase: Mitigate indefinite writer starvation
  locking/arch: Rename all internal __xchg() names to __arch_xchg()
parents d5ed10bb ec570320
Loading
Loading
Loading
Loading
+5 −5
Original line number Diff line number Diff line
@@ -6,14 +6,14 @@
 * Atomic exchange routines.
 */

#define ____xchg(type, args...)		__xchg ## type ## _local(args)
#define ____xchg(type, args...)		__arch_xchg ## type ## _local(args)
#define ____cmpxchg(type, args...)	__cmpxchg ## type ## _local(args)
#include <asm/xchg.h>

#define xchg_local(ptr, x)						\
({									\
	__typeof__(*(ptr)) _x_ = (x);					\
	(__typeof__(*(ptr))) __xchg_local((ptr), (unsigned long)_x_,	\
	(__typeof__(*(ptr))) __arch_xchg_local((ptr), (unsigned long)_x_,\
					       sizeof(*(ptr)));		\
})

@@ -34,7 +34,7 @@

#undef ____xchg
#undef ____cmpxchg
#define ____xchg(type, args...)		__xchg ##type(args)
#define ____xchg(type, args...)		__arch_xchg ##type(args)
#define ____cmpxchg(type, args...)	__cmpxchg ##type(args)
#include <asm/xchg.h>

@@ -48,7 +48,7 @@
	__typeof__(*(ptr)) _x_ = (x);					\
	smp_mb();							\
	__ret = (__typeof__(*(ptr)))					\
		__xchg((ptr), (unsigned long)_x_, sizeof(*(ptr)));	\
		__arch_xchg((ptr), (unsigned long)_x_, sizeof(*(ptr)));	\
	smp_mb();							\
	__ret;								\
})
+10 −2
Original line number Diff line number Diff line
@@ -52,8 +52,16 @@ static __inline__ long local_sub_return(long i, local_t * l)
	return result;
}

#define local_cmpxchg(l, o, n) \
	(cmpxchg_local(&((l)->a.counter), (o), (n)))
/*
 * local_cmpxchg - compare-and-exchange on a local_t counter.
 * Thin wrapper that delegates to cmpxchg_local() on l->a.counter.
 * Presumably follows the standard cmpxchg contract (returns the value
 * previously stored; equal to @old on success) — confirm against the
 * arch/generic cmpxchg_local() definition.
 */
static __inline__ long local_cmpxchg(local_t *l, long old, long new)
{
	return cmpxchg_local(&l->a.counter, old, new);
}

/*
 * local_try_cmpxchg - try-variant of local_cmpxchg.
 * Delegates to try_cmpxchg_local() on l->a.counter; returns the bool
 * success result and (per the try_cmpxchg convention) updates *old with
 * the observed value on failure.
 * NOTE(review): the (s64 *) cast assumes long is 64-bit on this
 * architecture — confirm this header is 64-bit-only.
 */
static __inline__ bool local_try_cmpxchg(local_t *l, long *old, long new)
{
	return try_cmpxchg_local(&l->a.counter, (s64 *)old, new);
}

#define local_xchg(l, n) (xchg_local(&((l)->a.counter), (n)))

/**
+2 −2
Original line number Diff line number Diff line
@@ -85,7 +85,7 @@
 */
#ifdef CONFIG_ARC_HAS_LLSC

#define __xchg(ptr, val)						\
#define __arch_xchg(ptr, val)						\
({									\
	__asm__ __volatile__(						\
	"	ex  %0, [%1]	\n"	/* set new value */	        \
@@ -102,7 +102,7 @@
									\
	switch(sizeof(*(_p_))) {					\
	case 4:								\
		_val_ = __xchg(_p_, _val_);				\
		_val_ = __arch_xchg(_p_, _val_);			\
		break;							\
	default:							\
		BUILD_BUG();						\
+4 −3
Original line number Diff line number Diff line
@@ -25,7 +25,8 @@
#define swp_is_buggy
#endif

static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
static inline unsigned long
__arch_xchg(unsigned long x, volatile void *ptr, int size)
{
	extern void __bad_xchg(volatile void *, int);
	unsigned long ret;
@@ -115,7 +116,7 @@ static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size
}

#define arch_xchg_relaxed(ptr, x) ({					\
	(__typeof__(*(ptr)))__xchg((unsigned long)(x), (ptr),		\
	(__typeof__(*(ptr)))__arch_xchg((unsigned long)(x), (ptr),	\
					sizeof(*(ptr)));		\
})

+3 −4
Original line number Diff line number Diff line
@@ -62,9 +62,8 @@ __XCHG_CASE( , , mb_, 64, dmb ish, nop, , a, l, "memory")
#undef __XCHG_CASE

#define __XCHG_GEN(sfx)							\
static __always_inline  unsigned long __xchg##sfx(unsigned long x,	\
					volatile void *ptr,		\
					int size)			\
static __always_inline unsigned long					\
__arch_xchg##sfx(unsigned long x, volatile void *ptr, int size)		\
{									\
	switch (size) {							\
	case 1:								\
@@ -93,7 +92,7 @@ __XCHG_GEN(_mb)
({									\
	__typeof__(*(ptr)) __ret;					\
	__ret = (__typeof__(*(ptr)))					\
		__xchg##sfx((unsigned long)(x), (ptr), sizeof(*(ptr))); \
		__arch_xchg##sfx((unsigned long)(x), (ptr), sizeof(*(ptr))); \
	__ret;								\
})

Loading