Loading arch/s390/Kconfig +1 −0 Original line number Diff line number Diff line Loading @@ -96,6 +96,7 @@ config S390 select HAVE_MEMBLOCK select HAVE_MEMBLOCK_NODE_MAP select HAVE_CMPXCHG_LOCAL select HAVE_CMPXCHG_DOUBLE select ARCH_DISCARD_MEMBLOCK select BUILDTIME_EXTABLE_SORT select ARCH_INLINE_SPIN_TRYLOCK Loading arch/s390/include/asm/cmpxchg.h +61 −0 Original line number Diff line number Diff line Loading @@ -7,7 +7,9 @@ #ifndef __ASM_CMPXCHG_H #define __ASM_CMPXCHG_H #include <linux/mmdebug.h> #include <linux/types.h> #include <linux/bug.h> extern void __xchg_called_with_bad_pointer(void); Loading Loading @@ -203,6 +205,65 @@ static inline unsigned long long __cmpxchg64(void *ptr, }) #endif /* CONFIG_64BIT */ #define __cmpxchg_double_op(p1, p2, o1, o2, n1, n2, insn) \ ({ \ register __typeof__(*(p1)) __old1 asm("2") = (o1); \ register __typeof__(*(p2)) __old2 asm("3") = (o2); \ register __typeof__(*(p1)) __new1 asm("4") = (n1); \ register __typeof__(*(p2)) __new2 asm("5") = (n2); \ int cc; \ asm volatile( \ insn " %[old],%[new],%[ptr]\n" \ " ipm %[cc]\n" \ " srl %[cc],28" \ : [cc] "=d" (cc), [old] "+d" (__old1), "+d" (__old2) \ : [new] "d" (__new1), "d" (__new2), \ [ptr] "Q" (*(p1)), "Q" (*(p2)) \ : "memory", "cc"); \ !cc; \ }) #define __cmpxchg_double_4(p1, p2, o1, o2, n1, n2) \ __cmpxchg_double_op(p1, p2, o1, o2, n1, n2, "cds") #define __cmpxchg_double_8(p1, p2, o1, o2, n1, n2) \ __cmpxchg_double_op(p1, p2, o1, o2, n1, n2, "cdsg") extern void __cmpxchg_double_called_with_bad_pointer(void); #define __cmpxchg_double(p1, p2, o1, o2, n1, n2) \ ({ \ int __ret; \ switch (sizeof(*(p1))) { \ case 4: \ __ret = __cmpxchg_double_4(p1, p2, o1, o2, n1, n2); \ break; \ case 8: \ __ret = __cmpxchg_double_8(p1, p2, o1, o2, n1, n2); \ break; \ default: \ __cmpxchg_double_called_with_bad_pointer(); \ } \ __ret; \ }) #define cmpxchg_double(p1, p2, o1, o2, n1, n2) \ ({ \ __typeof__(p1) __p1 = (p1); \ __typeof__(p2) __p2 = (p2); \ int __ret; \ BUILD_BUG_ON(sizeof(*(p1)) != 
sizeof(long)); \ BUILD_BUG_ON(sizeof(*(p2)) != sizeof(long)); \ VM_BUG_ON((unsigned long)((__p1) + 1) != (unsigned long)(__p2));\ if (sizeof(long) == 4) \ __ret = __cmpxchg_double_4(__p1, __p2, o1, o2, n1, n2); \ else \ __ret = __cmpxchg_double_8(__p1, __p2, o1, o2, n1, n2); \ __ret; \ }) #define system_has_cmpxchg_double() 1 #include <asm-generic/cmpxchg-local.h> static inline unsigned long __cmpxchg_local(void *ptr, Loading arch/s390/include/asm/percpu.h +21 −1 Original line number Diff line number Diff line Loading @@ -108,6 +108,26 @@ #define this_cpu_xchg_8(pcp, nval) arch_this_cpu_xchg(pcp, nval) #endif #define arch_this_cpu_cmpxchg_double(pcp1, pcp2, o1, o2, n1, n2) \ ({ \ typeof(pcp1) o1__ = (o1), n1__ = (n1); \ typeof(pcp2) o2__ = (o2), n2__ = (n2); \ typeof(pcp1) *p1__; \ typeof(pcp2) *p2__; \ int ret__; \ preempt_disable(); \ p1__ = __this_cpu_ptr(&(pcp1)); \ p2__ = __this_cpu_ptr(&(pcp2)); \ ret__ = __cmpxchg_double(p1__, p2__, o1__, o2__, n1__, n2__); \ preempt_enable(); \ ret__; \ }) #define this_cpu_cmpxchg_double_4 arch_this_cpu_cmpxchg_double #ifdef CONFIG_64BIT #define this_cpu_cmpxchg_double_8 arch_this_cpu_cmpxchg_double #endif #include <asm-generic/percpu.h> #endif /* __ARCH_S390_PERCPU__ */ Loading
# arch/s390/Kconfig (+1 line): "config S390" gains HAVE_CMPXCHG_DOUBLE
config S390
	select HAVE_MEMBLOCK
	select HAVE_MEMBLOCK_NODE_MAP
	select HAVE_CMPXCHG_LOCAL
	select HAVE_CMPXCHG_DOUBLE
	select ARCH_DISCARD_MEMBLOCK
	select BUILDTIME_EXTABLE_SORT
	select ARCH_INLINE_SPIN_TRYLOCK
arch/s390/include/asm/cmpxchg.h +61 −0 Original line number Diff line number Diff line Loading @@ -7,7 +7,9 @@ #ifndef __ASM_CMPXCHG_H #define __ASM_CMPXCHG_H #include <linux/mmdebug.h> #include <linux/types.h> #include <linux/bug.h> extern void __xchg_called_with_bad_pointer(void); Loading Loading @@ -203,6 +205,65 @@ static inline unsigned long long __cmpxchg64(void *ptr, }) #endif /* CONFIG_64BIT */ #define __cmpxchg_double_op(p1, p2, o1, o2, n1, n2, insn) \ ({ \ register __typeof__(*(p1)) __old1 asm("2") = (o1); \ register __typeof__(*(p2)) __old2 asm("3") = (o2); \ register __typeof__(*(p1)) __new1 asm("4") = (n1); \ register __typeof__(*(p2)) __new2 asm("5") = (n2); \ int cc; \ asm volatile( \ insn " %[old],%[new],%[ptr]\n" \ " ipm %[cc]\n" \ " srl %[cc],28" \ : [cc] "=d" (cc), [old] "+d" (__old1), "+d" (__old2) \ : [new] "d" (__new1), "d" (__new2), \ [ptr] "Q" (*(p1)), "Q" (*(p2)) \ : "memory", "cc"); \ !cc; \ }) #define __cmpxchg_double_4(p1, p2, o1, o2, n1, n2) \ __cmpxchg_double_op(p1, p2, o1, o2, n1, n2, "cds") #define __cmpxchg_double_8(p1, p2, o1, o2, n1, n2) \ __cmpxchg_double_op(p1, p2, o1, o2, n1, n2, "cdsg") extern void __cmpxchg_double_called_with_bad_pointer(void); #define __cmpxchg_double(p1, p2, o1, o2, n1, n2) \ ({ \ int __ret; \ switch (sizeof(*(p1))) { \ case 4: \ __ret = __cmpxchg_double_4(p1, p2, o1, o2, n1, n2); \ break; \ case 8: \ __ret = __cmpxchg_double_8(p1, p2, o1, o2, n1, n2); \ break; \ default: \ __cmpxchg_double_called_with_bad_pointer(); \ } \ __ret; \ }) #define cmpxchg_double(p1, p2, o1, o2, n1, n2) \ ({ \ __typeof__(p1) __p1 = (p1); \ __typeof__(p2) __p2 = (p2); \ int __ret; \ BUILD_BUG_ON(sizeof(*(p1)) != sizeof(long)); \ BUILD_BUG_ON(sizeof(*(p2)) != sizeof(long)); \ VM_BUG_ON((unsigned long)((__p1) + 1) != (unsigned long)(__p2));\ if (sizeof(long) == 4) \ __ret = __cmpxchg_double_4(__p1, __p2, o1, o2, n1, n2); \ else \ __ret = __cmpxchg_double_8(__p1, __p2, o1, o2, n1, n2); \ __ret; \ }) #define 
system_has_cmpxchg_double() 1 #include <asm-generic/cmpxchg-local.h> static inline unsigned long __cmpxchg_local(void *ptr, Loading
arch/s390/include/asm/percpu.h +21 −1 Original line number Diff line number Diff line Loading @@ -108,6 +108,26 @@ #define this_cpu_xchg_8(pcp, nval) arch_this_cpu_xchg(pcp, nval) #endif #define arch_this_cpu_cmpxchg_double(pcp1, pcp2, o1, o2, n1, n2) \ ({ \ typeof(pcp1) o1__ = (o1), n1__ = (n1); \ typeof(pcp2) o2__ = (o2), n2__ = (n2); \ typeof(pcp1) *p1__; \ typeof(pcp2) *p2__; \ int ret__; \ preempt_disable(); \ p1__ = __this_cpu_ptr(&(pcp1)); \ p2__ = __this_cpu_ptr(&(pcp2)); \ ret__ = __cmpxchg_double(p1__, p2__, o1__, o2__, n1__, n2__); \ preempt_enable(); \ ret__; \ }) #define this_cpu_cmpxchg_double_4 arch_this_cpu_cmpxchg_double #ifdef CONFIG_64BIT #define this_cpu_cmpxchg_double_8 arch_this_cpu_cmpxchg_double #endif #include <asm-generic/percpu.h> #endif /* __ARCH_S390_PERCPU__ */