Commit 2cb34276 authored by Andrey Konovalov, committed by Linus Torvalds
Browse files

arm64: kasan: simplify and inline MTE functions

This change provides a simpler implementation of mte_get_mem_tag(),
mte_get_random_tag(), and mte_set_mem_tag_range().

Simplifications include removing system_supports_mte() checks as these
functions are only called from the KASAN runtime, which has already checked
system_supports_mte().  Besides that, size and address alignment checks
are removed from mte_set_mem_tag_range(), as KASAN now does those.

This change also moves these functions into the asm/mte-kasan.h header and
implements mte_set_mem_tag_range() via inline assembly to avoid
unnecessary function calls.

[vincenzo.frascino@arm.com: fix warning in mte_get_random_tag()]
  Link: https://lkml.kernel.org/r/20210211152208.23811-1-vincenzo.frascino@arm.com

Link: https://lkml.kernel.org/r/a26121b294fdf76e369cb7a74351d1c03a908930.1612546384.git.andreyknvl@google.com


Co-developed-by: Vincenzo Frascino <vincenzo.frascino@arm.com>
Signed-off-by: Vincenzo Frascino <vincenzo.frascino@arm.com>
Signed-off-by: Andrey Konovalov <andreyknvl@google.com>
Reviewed-by: Catalin Marinas <catalin.marinas@arm.com>
Cc: Alexander Potapenko <glider@google.com>
Cc: Andrey Ryabinin <aryabinin@virtuozzo.com>
Cc: Branislav Rankov <Branislav.Rankov@arm.com>
Cc: Dmitry Vyukov <dvyukov@google.com>
Cc: Evgenii Stepanov <eugenis@google.com>
Cc: Kevin Brodsky <kevin.brodsky@arm.com>
Cc: Marco Elver <elver@google.com>
Cc: Peter Collingbourne <pcc@google.com>
Cc: Will Deacon <will.deacon@arm.com>
Signed-off-by: Andrew Morton <akpm@linux-foundation.org>
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
parent cde8a7eb
Loading
Loading
Loading
Loading
+0 −1
Original line number Diff line number Diff line
@@ -6,7 +6,6 @@
#define __ASM_CACHE_H

#include <asm/cputype.h>
#include <asm/mte-kasan.h>

#define CTR_L1IP_SHIFT		14
#define CTR_L1IP_MASK		3
+1 −0
Original line number Diff line number Diff line
@@ -6,6 +6,7 @@

#include <linux/linkage.h>
#include <asm/memory.h>
#include <asm/mte-kasan.h>
#include <asm/pgtable-types.h>

#define arch_kasan_set_tag(addr, tag)	__tag_set(addr, tag)
+2 −0
Original line number Diff line number Diff line
@@ -11,4 +11,6 @@
#define MTE_TAG_SIZE		4
#define MTE_TAG_MASK		GENMASK((MTE_TAG_SHIFT + (MTE_TAG_SIZE - 1)), MTE_TAG_SHIFT)

#define __MTE_PREAMBLE		ARM64_ASM_PREAMBLE ".arch_extension memtag\n"

#endif /* __ASM_MTE_DEF_H  */
+57 −8
Original line number Diff line number Diff line
@@ -11,11 +11,14 @@

#include <linux/types.h>

#ifdef CONFIG_ARM64_MTE

/*
 * The functions below are meant to be used only for the
 * KASAN_HW_TAGS interface defined in asm/memory.h.
 * These functions are meant to be only used from KASAN runtime through
 * the arch_*() interface defined in asm/memory.h.
 * These functions don't include system_supports_mte() checks,
 * as KASAN only calls them when MTE is supported and enabled.
 */
#ifdef CONFIG_ARM64_MTE

static inline u8 mte_get_ptr_tag(void *ptr)
{
@@ -25,9 +28,54 @@ static inline u8 mte_get_ptr_tag(void *ptr)
	return tag;
}

u8 mte_get_mem_tag(void *addr);
u8 mte_get_random_tag(void);
void *mte_set_mem_tag_range(void *addr, size_t size, u8 tag);
/*
 * Get the allocation tag for the address.
 *
 * LDG loads the allocation tag of the granule containing @addr into the
 * tag bits of the register holding the pointer (see the Arm ARM, MTE
 * extension); mte_get_ptr_tag() then extracts that tag from the pointer.
 * No system_supports_mte() check: callers (the KASAN runtime) guarantee
 * MTE is supported and enabled.
 */
static inline u8 mte_get_mem_tag(void *addr)
{
	asm(__MTE_PREAMBLE "ldg %0, [%0]"
		: "+r" (addr));

	return mte_get_ptr_tag(addr);
}

/*
 * Generate a random allocation tag.
 *
 * IRG inserts a randomly generated tag into the pointer register (the
 * input pointer value itself is irrelevant and may be uninitialized —
 * only the resulting tag bits are used); mte_get_ptr_tag() extracts it.
 */
static inline u8 mte_get_random_tag(void)
{
	void *addr;

	asm(__MTE_PREAMBLE "irg %0, %0"
		: "=r" (addr));

	return mte_get_ptr_tag(addr);
}

/*
 * Assign allocation tags for a region of memory based on the pointer tag.
 *
 * STG stores the tag held in the pointer's tag bits to the granule that
 * the pointer addresses, so the loop walks the region one MTE granule
 * (MTE_GRANULE_SIZE bytes) at a time with a pointer pre-tagged via
 * __tag_set().
 *
 * Note: the address must be non-NULL and MTE_GRANULE_SIZE aligned and
 * size must be non-zero and MTE_GRANULE_SIZE aligned — alignment is not
 * re-checked here, as KASAN already validates it before calling.
 */
static inline void mte_set_mem_tag_range(void *addr, size_t size, u8 tag)
{
	u64 curr, end;

	if (!size)
		return;

	curr = (u64)__tag_set(addr, tag);
	end = curr + size;

	do {
		/*
		 * 'asm volatile' is required to prevent the compiler to move
		 * the statement outside of the loop.
		 */
		asm volatile(__MTE_PREAMBLE "stg %0, [%0]"
			     :
			     : "r" (curr)
			     : "memory");

		curr += MTE_GRANULE_SIZE;
	} while (curr != end);
}

void mte_enable_kernel(void);
void mte_init_tags(u64 max_tag);
@@ -46,13 +94,14 @@ static inline u8 mte_get_mem_tag(void *addr)
{
	return 0xFF;
}

static inline u8 mte_get_random_tag(void)
{
	return 0xFF;
}
static inline void *mte_set_mem_tag_range(void *addr, size_t size, u8 tag)

static inline void mte_set_mem_tag_range(void *addr, size_t size, u8 tag)
{
	return addr;
}

static inline void mte_enable_kernel(void)
+0 −2
Original line number Diff line number Diff line
@@ -8,8 +8,6 @@
#include <asm/compiler.h>
#include <asm/mte-def.h>

#define __MTE_PREAMBLE		ARM64_ASM_PREAMBLE ".arch_extension memtag\n"

#ifndef __ASSEMBLY__

#include <linux/bitfield.h>
Loading