Commit fd81e6bf authored by Marc Zyngier, committed by Christoffer Dall
Browse files

arm64: KVM: Refactor kern_hyp_va to deal with multiple offsets



As we move towards a selectable HYP VA range, it is obvious that
we don't want to test a variable to find out if we need to use
the bottom VA range, the top VA range, or use the address as is
(for VHE).

Instead, we can expand our current helper to generate the right
mask or nop with code patching. We default to using the top VA
space, with alternatives to switch to the bottom one or to nop
out the instructions.

Signed-off-by: Marc Zyngier <marc.zyngier@arm.com>
Signed-off-by: Christoffer Dall <christoffer.dall@linaro.org>
parent d53d9bc6
Loading
Loading
Loading
Loading
+0 −11
Original line number Diff line number Diff line
@@ -25,17 +25,6 @@

#define __hyp_text __section(.hyp.text) notrace

/*
 * Convert a kernel VA to a HYP VA (pre-refactor version; the "-11"
 * hunk header above shows this is the code removed by this commit).
 *
 * Patched at boot via the alternatives framework: AND the address
 * with the single HYP_PAGE_OFFSET_MASK, or become a NOP when the CPU
 * has VHE (ARM64_HAS_VIRT_HOST_EXTN) and the kernel VA is usable
 * as-is at EL2.
 */
static inline unsigned long __kern_hyp_va(unsigned long v)
{
	asm volatile(ALTERNATIVE("and %0, %0, %1",
				 "nop",
				 ARM64_HAS_VIRT_HOST_EXTN)
		     : "+r" (v) : "i" (HYP_PAGE_OFFSET_MASK));
	return v;
}

/* Type-preserving wrapper: casts the converted address back to typeof(v). */
#define kern_hyp_va(v) (typeof(v))(__kern_hyp_va((unsigned long)(v)))

#define read_sysreg_elx(r,nvh,vh)					\
	({								\
		u64 reg;						\
+39 −3
Original line number Diff line number Diff line
@@ -90,13 +90,33 @@
/*
 * Convert a kernel VA into a HYP VA.
 * reg: VA to be converted.
 *
 * This generates the following sequences:
 * - High mask:
 *		and x0, x0, #HYP_PAGE_OFFSET_HIGH_MASK
 *		nop
 * - Low mask:
 *		and x0, x0, #HYP_PAGE_OFFSET_HIGH_MASK
 *		and x0, x0, #HYP_PAGE_OFFSET_LOW_MASK
 * - VHE:
 *		nop
 *		nop
 *
 * The "low mask" version works because the mask is a strict subset of
 * the "high mask", hence performing the first mask for nothing.
 * Should be completely invisible on any viable CPU.
 */
.macro kern_hyp_va	reg
alternative_if_not ARM64_HAS_VIRT_HOST_EXTN
	/* Removed by this commit: the old single-mask AND */
	and	\reg, \reg, #HYP_PAGE_OFFSET_MASK
	/* Added: default to the high mask; see header comment above */
	and     \reg, \reg, #HYP_PAGE_OFFSET_HIGH_MASK
alternative_else
	nop	/* VHE: kernel VA is used as-is, first AND patched out */
alternative_endif
alternative_if_not ARM64_HYP_OFFSET_LOW
	nop	/* top VA range (default): no second mask needed */
alternative_else
	/* bottom VA range: low mask is a subset of the high one */
	and     \reg, \reg, #HYP_PAGE_OFFSET_LOW_MASK
alternative_endif
.endm

#else
@@ -107,7 +127,23 @@ alternative_endif
#include <asm/mmu_context.h>
#include <asm/pgtable.h>

/* Removed by this commit: old direct-mask form of KERN_TO_HYP. */
#define KERN_TO_HYP(kva)	((unsigned long)kva & HYP_PAGE_OFFSET_MASK)
/*
 * Convert a kernel VA into a HYP VA — C counterpart of the asm
 * kern_hyp_va macro, using the same two alternative-patched steps:
 *
 *  1. AND with HYP_PAGE_OFFSET_HIGH_MASK by default; patched to a NOP
 *     on VHE (ARM64_HAS_VIRT_HOST_EXTN), where the kernel VA is valid
 *     at EL2 as-is.
 *  2. NOP by default (top VA range); patched to an AND with
 *     HYP_PAGE_OFFSET_LOW_MASK when ARM64_HYP_OFFSET_LOW selects the
 *     bottom VA range.  Per the comment in the asm hunk, the low mask
 *     is a strict subset of the high one, so applying both is harmless.
 */
static inline unsigned long __kern_hyp_va(unsigned long v)
{
	asm volatile(ALTERNATIVE("and %0, %0, %1",
				 "nop",
				 ARM64_HAS_VIRT_HOST_EXTN)
		     : "+r" (v)
		     : "i" (HYP_PAGE_OFFSET_HIGH_MASK));
	asm volatile(ALTERNATIVE("nop",
				 "and %0, %0, %1",
				 ARM64_HYP_OFFSET_LOW)
		     : "+r" (v)
		     : "i" (HYP_PAGE_OFFSET_LOW_MASK));
	return v;
}

/* Type-preserving wrapper around __kern_hyp_va(). */
#define kern_hyp_va(v) 	(typeof(v))(__kern_hyp_va((unsigned long)(v)))
/* Kept for existing callers; now routed through kern_hyp_va(). */
#define KERN_TO_HYP(v)	kern_hyp_va(v)

/*
 * We currently only support a 40bit IPA.