Commit 06855063 authored by Andrzej Hajda, committed by Ingo Molnar
Browse files

locking/arch: Rename all internal __xchg() names to __arch_xchg()



Decrease the probability of this internal facility being used by
driver code.

Signed-off-by: Andrzej Hajda <andrzej.hajda@intel.com>
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Signed-off-by: Ingo Molnar <mingo@kernel.org>
Reviewed-by: Arnd Bergmann <arnd@arndb.de>
Reviewed-by: Andi Shyti <andi.shyti@linux.intel.com>
Acked-by: Geert Uytterhoeven <geert@linux-m68k.org> [m68k]
Acked-by: Palmer Dabbelt <palmer@rivosinc.com> [riscv]
Link: https://lore.kernel.org/r/20230118154450.73842-1-andrzej.hajda@intel.com
Cc: Linus Torvalds <torvalds@linux-foundation.org>
parent ee1ee6db
Loading
Loading
Loading
Loading
+5 −5
Original line number Diff line number Diff line
@@ -6,14 +6,14 @@
 * Atomic exchange routines.
 */

#define ____xchg(type, args...)		__xchg ## type ## _local(args)
#define ____xchg(type, args...)		__arch_xchg ## type ## _local(args)
#define ____cmpxchg(type, args...)	__cmpxchg ## type ## _local(args)
#include <asm/xchg.h>

#define xchg_local(ptr, x)						\
({									\
	__typeof__(*(ptr)) _x_ = (x);					\
	(__typeof__(*(ptr))) __xchg_local((ptr), (unsigned long)_x_,	\
	(__typeof__(*(ptr))) __arch_xchg_local((ptr), (unsigned long)_x_,\
					       sizeof(*(ptr)));		\
})

@@ -34,7 +34,7 @@

#undef ____xchg
#undef ____cmpxchg
#define ____xchg(type, args...)		__xchg ##type(args)
#define ____xchg(type, args...)		__arch_xchg ##type(args)
#define ____cmpxchg(type, args...)	__cmpxchg ##type(args)
#include <asm/xchg.h>

@@ -48,7 +48,7 @@
	__typeof__(*(ptr)) _x_ = (x);					\
	smp_mb();							\
	__ret = (__typeof__(*(ptr)))					\
		__xchg((ptr), (unsigned long)_x_, sizeof(*(ptr)));	\
		__arch_xchg((ptr), (unsigned long)_x_, sizeof(*(ptr)));	\
	smp_mb();							\
	__ret;								\
})
+2 −2
Original line number Diff line number Diff line
@@ -85,7 +85,7 @@
 */
#ifdef CONFIG_ARC_HAS_LLSC

#define __xchg(ptr, val)						\
#define __arch_xchg(ptr, val)						\
({									\
	__asm__ __volatile__(						\
	"	ex  %0, [%1]	\n"	/* set new value */	        \
@@ -102,7 +102,7 @@
									\
	switch(sizeof(*(_p_))) {					\
	case 4:								\
		_val_ = __xchg(_p_, _val_);				\
		_val_ = __arch_xchg(_p_, _val_);			\
		break;							\
	default:							\
		BUILD_BUG();						\
+4 −3
Original line number Diff line number Diff line
@@ -25,7 +25,8 @@
#define swp_is_buggy
#endif

static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
static inline unsigned long
__arch_xchg(unsigned long x, volatile void *ptr, int size)
{
	extern void __bad_xchg(volatile void *, int);
	unsigned long ret;
@@ -115,7 +116,7 @@ static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size
}

#define arch_xchg_relaxed(ptr, x) ({					\
	(__typeof__(*(ptr)))__xchg((unsigned long)(x), (ptr),		\
	(__typeof__(*(ptr)))__arch_xchg((unsigned long)(x), (ptr),	\
					sizeof(*(ptr)));		\
})

+3 −4
Original line number Diff line number Diff line
@@ -62,9 +62,8 @@ __XCHG_CASE( , , mb_, 64, dmb ish, nop, , a, l, "memory")
#undef __XCHG_CASE

#define __XCHG_GEN(sfx)							\
static __always_inline  unsigned long __xchg##sfx(unsigned long x,	\
					volatile void *ptr,		\
					int size)			\
static __always_inline unsigned long					\
__arch_xchg##sfx(unsigned long x, volatile void *ptr, int size)		\
{									\
	switch (size) {							\
	case 1:								\
@@ -93,7 +92,7 @@ __XCHG_GEN(_mb)
({									\
	__typeof__(*(ptr)) __ret;					\
	__ret = (__typeof__(*(ptr)))					\
		__xchg##sfx((unsigned long)(x), (ptr), sizeof(*(ptr))); \
		__arch_xchg##sfx((unsigned long)(x), (ptr), sizeof(*(ptr))); \
	__ret;								\
})

+5 −5
Original line number Diff line number Diff line
@@ -9,7 +9,7 @@
#define _ASM_CMPXCHG_H

/*
 * __xchg - atomically exchange a register and a memory location
 * __arch_xchg - atomically exchange a register and a memory location
 * @x: value to swap
 * @ptr: pointer to memory
 * @size:  size of the value
@@ -19,8 +19,8 @@
 * Note:  there was an errata for V2 about .new's and memw_locked.
 *
 */
static inline unsigned long __xchg(unsigned long x, volatile void *ptr,
				   int size)
static inline unsigned long
__arch_xchg(unsigned long x, volatile void *ptr, int size)
{
	unsigned long retval;

@@ -42,7 +42,7 @@ static inline unsigned long __xchg(unsigned long x, volatile void *ptr,
 * Atomically swap the contents of a register with memory.  Should be atomic
 * between multiple CPU's and within interrupts on the same CPU.
 */
#define arch_xchg(ptr, v) ((__typeof__(*(ptr)))__xchg((unsigned long)(v), (ptr), \
#define arch_xchg(ptr, v) ((__typeof__(*(ptr)))__arch_xchg((unsigned long)(v), (ptr), \
							   sizeof(*(ptr))))

/*
Loading