Commit ba959fe9 authored by Pasha Tatashin's avatar Pasha Tatashin Committed by Will Deacon
Browse files

arm64: kexec: relocate in EL1 mode



Since we are going to keep MMU enabled during relocation, we need to
keep EL1 mode throughout the relocation.

Keep EL1 enabled, and switch EL2 only before entering the new world.

Suggested-by: James Morse <james.morse@arm.com>
Signed-off-by: Pasha Tatashin <pasha.tatashin@soleen.com>
Acked-by: Catalin Marinas <catalin.marinas@arm.com>
Link: https://lore.kernel.org/r/20210930143113.1502553-10-pasha.tatashin@soleen.com


Signed-off-by: Will Deacon <will@kernel.org>
parent 08eae0ef
Loading
Loading
Loading
Loading
+1 −2
Original line number Diff line number Diff line
@@ -20,11 +20,10 @@ static inline void __noreturn __nocfi cpu_soft_restart(unsigned long entry,
{
	typeof(__cpu_soft_restart) *restart;

	unsigned long el2_switch = is_hyp_nvhe();
	restart = (void *)__pa_symbol(function_nocfi(__cpu_soft_restart));

	cpu_install_idmap();
	restart(el2_switch, entry, arg0, arg1, arg2);
	restart(0, entry, arg0, arg1, arg2);
	unreachable();
}

+2 −2
Original line number Diff line number Diff line
@@ -240,8 +240,8 @@ void machine_kexec(struct kimage *kimage)
	} else {
		if (is_hyp_nvhe())
			__hyp_set_vectors(kimage->arch.el2_vectors);
		cpu_soft_restart(kimage->arch.kern_reloc, virt_to_phys(kimage),
				 0, 0);
		cpu_soft_restart(kimage->arch.kern_reloc,
				 virt_to_phys(kimage), 0, 0);
	}

	BUG(); /* Should never get here. */
+11 −2
Original line number Diff line number Diff line
@@ -13,6 +13,7 @@
#include <asm/kexec.h>
#include <asm/page.h>
#include <asm/sysreg.h>
#include <asm/virt.h>

/*
 * arm64_relocate_new_kernel - Put a 2nd stage image in place and boot it.
@@ -61,12 +62,20 @@ SYM_CODE_START(arm64_relocate_new_kernel)
	isb

	/* Start new image. */
	ldr	x1, [x0, #KIMAGE_ARCH_EL2_VECTORS]	/* relocation start */
	cbz	x1, .Lel1
	ldr	x1, [x0, #KIMAGE_START]		/* relocation start */
	ldr	x2, [x0, #KIMAGE_ARCH_DTB_MEM]	/* dtb address */
	mov	x3, xzr
	mov	x4, xzr
	mov     x0, #HVC_SOFT_RESTART
	hvc	#0				/* Jumps from el2 */
.Lel1:
	ldr	x4, [x0, #KIMAGE_START]		/* relocation start */
	ldr	x0, [x0, #KIMAGE_ARCH_DTB_MEM]	/* dtb address */
	mov	x1, xzr
	mov	x2, xzr
	mov	x3, xzr
	br	x4
	br	x4				/* Jumps from el1 */
SYM_CODE_END(arm64_relocate_new_kernel)

.align 3	/* To keep the 64-bit values below naturally aligned. */