Commit e1f358b5 authored by Steven Price, committed by Marc Zyngier
Browse files

KVM: arm64: Save/restore MTE registers



Define the new system registers that MTE introduces and context switch
them. The MTE feature is still hidden from the ID register as it isn't
supported in a VM yet.

Reviewed-by: Catalin Marinas <catalin.marinas@arm.com>
Signed-off-by: Steven Price <steven.price@arm.com>
Signed-off-by: Marc Zyngier <maz@kernel.org>
Link: https://lore.kernel.org/r/20210621111716.37157-4-steven.price@arm.com
parent ea7fc1bb
Loading
Loading
Loading
Loading
+2 −1
Original line number Diff line number Diff line
@@ -12,7 +12,8 @@
#include <asm/types.h>

/* Hyp Configuration Register (HCR) bits */
#define HCR_ATA		(UL(1) << 56)
#define HCR_ATA_SHIFT	56
#define HCR_ATA		(UL(1) << HCR_ATA_SHIFT)
#define HCR_FWB		(UL(1) << 46)
#define HCR_API		(UL(1) << 41)
#define HCR_APK		(UL(1) << 40)
+6 −0
Original line number Diff line number Diff line
@@ -209,6 +209,12 @@ enum vcpu_sysreg {
	CNTP_CVAL_EL0,
	CNTP_CTL_EL0,

	/* Memory Tagging Extension registers */
	RGSR_EL1,	/* Random Allocation Tag Seed Register */
	GCR_EL1,	/* Tag Control Register */
	TFSR_EL1,	/* Tag Fault Status Register (EL1) */
	TFSRE0_EL1,	/* Tag Fault Status Register (EL0) */

	/* 32bit specific registers. Keep them at the end of the range */
	DACR32_EL2,	/* Domain Access Control Register */
	IFSR32_EL2,	/* Instruction Fault Status Register */
+66 −0
Original line number Diff line number Diff line
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 2020-2021 ARM Ltd.
 */
#ifndef __ASM_KVM_MTE_H
#define __ASM_KVM_MTE_H

#ifdef __ASSEMBLY__

#include <asm/sysreg.h>

#ifdef CONFIG_ARM64_MTE

.macro mte_switch_to_guest g_ctxt, h_ctxt, reg1
alternative_if_not ARM64_MTE
	b	.L__skip_switch\@
alternative_else_nop_endif
	mrs	\reg1, hcr_el2
	tbz	\reg1, #(HCR_ATA_SHIFT), .L__skip_switch\@

	mrs_s	\reg1, SYS_RGSR_EL1
	str	\reg1, [\h_ctxt, #CPU_RGSR_EL1]
	mrs_s	\reg1, SYS_GCR_EL1
	str	\reg1, [\h_ctxt, #CPU_GCR_EL1]

	ldr	\reg1, [\g_ctxt, #CPU_RGSR_EL1]
	msr_s	SYS_RGSR_EL1, \reg1
	ldr	\reg1, [\g_ctxt, #CPU_GCR_EL1]
	msr_s	SYS_GCR_EL1, \reg1

.L__skip_switch\@:
.endm

.macro mte_switch_to_hyp g_ctxt, h_ctxt, reg1
alternative_if_not ARM64_MTE
	b	.L__skip_switch\@
alternative_else_nop_endif
	mrs	\reg1, hcr_el2
	tbz	\reg1, #(HCR_ATA_SHIFT), .L__skip_switch\@

	mrs_s	\reg1, SYS_RGSR_EL1
	str	\reg1, [\g_ctxt, #CPU_RGSR_EL1]
	mrs_s	\reg1, SYS_GCR_EL1
	str	\reg1, [\g_ctxt, #CPU_GCR_EL1]

	ldr	\reg1, [\h_ctxt, #CPU_RGSR_EL1]
	msr_s	SYS_RGSR_EL1, \reg1
	ldr	\reg1, [\h_ctxt, #CPU_GCR_EL1]
	msr_s	SYS_GCR_EL1, \reg1

	isb

.L__skip_switch\@:
.endm

#else /* !CONFIG_ARM64_MTE */

.macro mte_switch_to_guest g_ctxt, h_ctxt, reg1
.endm

.macro mte_switch_to_hyp g_ctxt, h_ctxt, reg1
.endm

#endif /* CONFIG_ARM64_MTE */
#endif /* __ASSEMBLY__ */
#endif /* __ASM_KVM_MTE_H */
+2 −1
Original line number Diff line number Diff line
@@ -651,7 +651,8 @@

#define INIT_SCTLR_EL2_MMU_ON						\
	(SCTLR_ELx_M  | SCTLR_ELx_C | SCTLR_ELx_SA | SCTLR_ELx_I |	\
	 SCTLR_ELx_IESB | SCTLR_ELx_WXN | ENDIAN_SET_EL2 | SCTLR_EL2_RES1)
	 SCTLR_ELx_IESB | SCTLR_ELx_WXN | ENDIAN_SET_EL2 |		\
	 SCTLR_ELx_ITFSB | SCTLR_EL2_RES1)

#define INIT_SCTLR_EL2_MMU_OFF \
	(SCTLR_EL2_RES1 | ENDIAN_SET_EL2)
+2 −0
Original line number Diff line number Diff line
@@ -111,6 +111,8 @@ int main(void)
  DEFINE(VCPU_WORKAROUND_FLAGS,	offsetof(struct kvm_vcpu, arch.workaround_flags));
  DEFINE(VCPU_HCR_EL2,		offsetof(struct kvm_vcpu, arch.hcr_el2));
  DEFINE(CPU_USER_PT_REGS,	offsetof(struct kvm_cpu_context, regs));
  DEFINE(CPU_RGSR_EL1,		offsetof(struct kvm_cpu_context, sys_regs[RGSR_EL1]));
  DEFINE(CPU_GCR_EL1,		offsetof(struct kvm_cpu_context, sys_regs[GCR_EL1]));
  DEFINE(CPU_APIAKEYLO_EL1,	offsetof(struct kvm_cpu_context, sys_regs[APIAKEYLO_EL1]));
  DEFINE(CPU_APIBKEYLO_EL1,	offsetof(struct kvm_cpu_context, sys_regs[APIBKEYLO_EL1]));
  DEFINE(CPU_APDAKEYLO_EL1,	offsetof(struct kvm_cpu_context, sys_regs[APDAKEYLO_EL1]));
Loading