Commit 55123aff authored by Mark Rutland's avatar Mark Rutland Committed by Catalin Marinas
Browse files

arm64: kasan: remove !KASAN_VMALLOC remnants



Historically, KASAN could be selected with or without KASAN_VMALLOC, but
since commit:

  f6f37d93 ("arm64: select KASAN_VMALLOC for SW/HW_TAGS modes")

... we can never select KASAN without KASAN_VMALLOC on arm64, and thus
arm64 code for KASAN && !KASAN_VMALLOC is redundant and can be removed.

Remove the redundant code from kasan_init.c

Signed-off-by: default avatarMark Rutland <mark.rutland@arm.com>
Reviewed-by: default avatarAlexander Potapenko <glider@google.com>
Reviewed-by: default avatarArd Biesheuvel <ardb@kernel.org>
Cc: Andrey Konovalov <andreyknvl@google.com>
Cc: Andrey Ryabinin <ryabinin.a.a@gmail.com>
Cc: Dmitry Vyukov <dvyukov@google.com>
Cc: Will Deacon <will@kernel.org>
Tested-by: default avatarShanker Donthineni <sdonthineni@nvidia.com>
Link: https://lore.kernel.org/r/20230530110328.2213762-3-mark.rutland@arm.com


Signed-off-by: default avatarCatalin Marinas <catalin.marinas@arm.com>
parent 8339f7d8
Loading
Loading
Loading
Loading
+4 −13
Original line number Diff line number Diff line
@@ -214,7 +214,7 @@ static void __init clear_pgds(unsigned long start,
static void __init kasan_init_shadow(void)
{
	u64 kimg_shadow_start, kimg_shadow_end;
	u64 mod_shadow_start, mod_shadow_end;
	u64 mod_shadow_start;
	u64 vmalloc_shadow_end;
	phys_addr_t pa_start, pa_end;
	u64 i;
@@ -223,7 +223,6 @@ static void __init kasan_init_shadow(void)
	kimg_shadow_end = PAGE_ALIGN((u64)kasan_mem_to_shadow(KERNEL_END));

	mod_shadow_start = (u64)kasan_mem_to_shadow((void *)MODULES_VADDR);
	mod_shadow_end = (u64)kasan_mem_to_shadow((void *)MODULES_END);

	vmalloc_shadow_end = (u64)kasan_mem_to_shadow((void *)VMALLOC_END);

@@ -246,17 +245,9 @@ static void __init kasan_init_shadow(void)
	kasan_populate_early_shadow(kasan_mem_to_shadow((void *)PAGE_END),
				   (void *)mod_shadow_start);

	if (IS_ENABLED(CONFIG_KASAN_VMALLOC)) {
	BUILD_BUG_ON(VMALLOC_START != MODULES_END);
	kasan_populate_early_shadow((void *)vmalloc_shadow_end,
				    (void *)KASAN_SHADOW_END);
	} else {
		kasan_populate_early_shadow((void *)kimg_shadow_end,
					    (void *)KASAN_SHADOW_END);
		if (kimg_shadow_start > mod_shadow_end)
			kasan_populate_early_shadow((void *)mod_shadow_end,
						    (void *)kimg_shadow_start);
	}

	for_each_mem_range(i, &pa_start, &pa_end) {
		void *start = (void *)__phys_to_virt(pa_start);