Commit 725aea87 authored by Max Filippov

xtensa: enable KCSAN

Prefix arch-specific barrier macros with '__' to make use of instrumented
generic macros.
Prefix arch-specific bitops with 'arch_' to make use of instrumented
generic functions.
Provide stubs for 64-bit atomics when building with KCSAN.
Disable KCSAN instrumentation in arch/xtensa/boot.

Signed-off-by: Max Filippov <jcmvbkbc@gmail.com>
Acked-by: Marco Elver <elver@google.com>
parent 50718569
arch/xtensa/Kconfig  +1 −0
@@ -29,6 +29,7 @@ config XTENSA
 	select HAVE_ARCH_AUDITSYSCALL
 	select HAVE_ARCH_JUMP_LABEL if !XIP_KERNEL
 	select HAVE_ARCH_KASAN if MMU && !XIP_KERNEL
+	select HAVE_ARCH_KCSAN
 	select HAVE_ARCH_SECCOMP_FILTER
 	select HAVE_ARCH_TRACEHOOK
 	select HAVE_CONTEXT_TRACKING
arch/xtensa/boot/lib/Makefile  +1 −0
@@ -16,6 +16,7 @@ CFLAGS_REMOVE_inffast.o = -pg
 endif
 
 KASAN_SANITIZE := n
+KCSAN_SANITIZE := n
 
 CFLAGS_REMOVE_inflate.o += -fstack-protector -fstack-protector-strong
 CFLAGS_REMOVE_zmem.o += -fstack-protector -fstack-protector-strong
arch/xtensa/include/asm/barrier.h  +9 −3
@@ -11,9 +11,15 @@
 
 #include <asm/core.h>
 
-#define mb()  ({ __asm__ __volatile__("memw" : : : "memory"); })
-#define rmb() barrier()
-#define wmb() mb()
+#define __mb()  ({ __asm__ __volatile__("memw" : : : "memory"); })
+#define __rmb() barrier()
+#define __wmb() __mb()
+
+#ifdef CONFIG_SMP
+#define __smp_mb() __mb()
+#define __smp_rmb() __rmb()
+#define __smp_wmb() __wmb()
+#endif
 
 #if XCHAL_HAVE_S32C1I
 #define __smp_mb__before_atomic()		barrier()
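Note: renaming mb()/rmb()/wmb() to their double-underscore forms lets
include/asm-generic/barrier.h define the public macros as KCSAN-aware
wrappers. A simplified sketch of how the generic header builds on the
arch macros (abridged from the generic header, not part of this diff):

	/* Sketch: the generic layer tells KCSAN about the ordering the
	 * barrier provides, then invokes the architecture's raw barrier.
	 */
	#ifdef __mb
	#define mb()	do { kcsan_mb(); __mb(); } while (0)
	#endif

	#ifdef __rmb
	#define rmb()	do { kcsan_rmb(); __rmb(); } while (0)
	#endif

	#ifdef __wmb
	#define wmb()	do { kcsan_wmb(); __wmb(); } while (0)
	#endif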
arch/xtensa/include/asm/bitops.h  +6 −4
@@ -99,7 +99,7 @@ static inline unsigned long __fls(unsigned long word)
 #if XCHAL_HAVE_EXCLUSIVE
 
 #define BIT_OP(op, insn, inv)						\
-static inline void op##_bit(unsigned int bit, volatile unsigned long *p)\
+static inline void arch_##op##_bit(unsigned int bit, volatile unsigned long *p)\
 {									\
 	unsigned long tmp;						\
 	unsigned long mask = 1UL << (bit & 31);				\
@@ -119,7 +119,7 @@ static inline void op##_bit(unsigned int bit, volatile unsigned long *p)\
 
 #define TEST_AND_BIT_OP(op, insn, inv)					\
 static inline int							\
-test_and_##op##_bit(unsigned int bit, volatile unsigned long *p)	\
+arch_test_and_##op##_bit(unsigned int bit, volatile unsigned long *p)	\
 {									\
 	unsigned long tmp, value;					\
 	unsigned long mask = 1UL << (bit & 31);				\
@@ -142,7 +142,7 @@ test_and_##op##_bit(unsigned int bit, volatile unsigned long *p) \
 #elif XCHAL_HAVE_S32C1I
 
 #define BIT_OP(op, insn, inv)						\
-static inline void op##_bit(unsigned int bit, volatile unsigned long *p)\
+static inline void arch_##op##_bit(unsigned int bit, volatile unsigned long *p)\
 {									\
 	unsigned long tmp, value;					\
 	unsigned long mask = 1UL << (bit & 31);				\
@@ -163,7 +163,7 @@ static inline void op##_bit(unsigned int bit, volatile unsigned long *p)\
 
 #define TEST_AND_BIT_OP(op, insn, inv)					\
 static inline int							\
-test_and_##op##_bit(unsigned int bit, volatile unsigned long *p)	\
+arch_test_and_##op##_bit(unsigned int bit, volatile unsigned long *p)	\
 {									\
 	unsigned long tmp, value;					\
 	unsigned long mask = 1UL << (bit & 31);				\
@@ -205,6 +205,8 @@ BIT_OPS(change, "xor", )
 #undef BIT_OP
 #undef TEST_AND_BIT_OP
 
+#include <asm-generic/bitops/instrumented-atomic.h>
+
 #include <asm-generic/bitops/le.h>
 
 #include <asm-generic/bitops/ext2-atomic-setbit.h>
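Note: with the implementations renamed to arch_*, the newly included
asm-generic/bitops/instrumented-atomic.h provides the public entry
points. A simplified sketch of the pattern (abridged from the generic
header, not part of this diff):

	/* Sketch: report the access to KCSAN, then call the arch_ helper
	 * generated by BIT_OP()/TEST_AND_BIT_OP() above.
	 */
	static __always_inline void set_bit(long nr, volatile unsigned long *addr)
	{
		instrument_atomic_write(addr + BIT_WORD(nr), sizeof(long));
		arch_set_bit(nr, addr);
	}

	static __always_inline bool test_and_set_bit(long nr, volatile unsigned long *addr)
	{
		instrument_atomic_read_write(addr + BIT_WORD(nr), sizeof(long));
		return arch_test_and_set_bit(nr, addr);
	}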
arch/xtensa/lib/Makefile  +2 −0
@@ -8,3 +8,5 @@ lib-y += memcopy.o memset.o checksum.o \
 	   divsi3.o udivsi3.o modsi3.o umodsi3.o mulsi3.o \
 	   usercopy.o strncpy_user.o strnlen_user.o
 lib-$(CONFIG_PCI) += pci-auto.o
+lib-$(CONFIG_KCSAN) += kcsan-stubs.o
+KCSAN_SANITIZE_kcsan-stubs.o := n
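Note: the new kcsan-stubs.c file itself is not rendered above. KCSAN's
compiler instrumentation can emit calls to the 64-bit __atomic_*
libcalls, which 32-bit xtensa cannot satisfy natively; per the commit
message, stubs are supplied so the kernel links, and the
KCSAN_SANITIZE_kcsan-stubs.o := n line keeps the stubs themselves
uninstrumented. A hypothetical sketch of such stubs (the names follow
the compiler's atomic-libcall ABI; the BUG() bodies are an assumption,
on the premise that the calls are never reached at run time):

	/* Hypothetical sketch of arch/xtensa/lib/kcsan-stubs.c (assumed,
	 * not shown in this diff): satisfy the linker for 64-bit atomic
	 * libcalls that KCSAN-instrumented code may reference.
	 */
	#include <linux/bug.h>
	#include <linux/types.h>

	void __atomic_store_8(volatile void *p, u64 v, int i)
	{
		BUG();	/* assumed unreachable on xtensa */
	}

	u64 __atomic_load_8(const volatile void *p, int i)
	{
		BUG();	/* BUG() does not return */
	}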