Commit ddc806b5 authored by Mark Brown, committed by Will Deacon
Browse files

arm64/sve: Explicitly load vector length when restoring SVE state



Currently when restoring the SVE state we supply the SVE vector length
as an argument to sve_load_state() and the underlying macros. This becomes
inconvenient with the addition of SME since we may need to restore any
combination of SVE and SME vector lengths, and we already separately
restore the vector length in the KVM code. We don't need to know the vector
length during the actual register load since the SME load instructions can
index into the data array for us.

Refactor the interface so we explicitly set the vector length separately
from restoring the SVE registers, in preparation for adding SME support. No
functional change should be involved.

Signed-off-by: Mark Brown <broonie@kernel.org>
Link: https://lore.kernel.org/r/20211019172247.3045838-9-broonie@kernel.org


Signed-off-by: Will Deacon <will@kernel.org>
parent b5bc00ff
Loading
Loading
Loading
Loading
+1 −1
Original line number Diff line number Diff line
@@ -67,7 +67,7 @@ static inline void *sve_pffr(struct thread_struct *thread)

extern void sve_save_state(void *state, u32 *pfpsr, int save_ffr);
extern void sve_load_state(void const *state, u32 const *pfpsr,
			   int restore_ffr, unsigned long vq_minus_1);
			   int restore_ffr);
extern void sve_flush_live(bool flush_ffr, unsigned long vq_minus_1);
extern unsigned int sve_get_vl(void);
extern void sve_set_vq(unsigned long vq_minus_1);
+1 −6
Original line number Diff line number Diff line
@@ -241,7 +241,7 @@
		str		w\nxtmp, [\xpfpsr, #4]
.endm

.macro __sve_load nxbase, xpfpsr, restore_ffr, nxtmp
.macro sve_load nxbase, xpfpsr, restore_ffr, nxtmp
 _for n, 0, 31,	_sve_ldr_v	\n, \nxbase, \n - 34
		cbz		\restore_ffr, 921f
		_sve_ldr_p	0, \nxbase
@@ -254,8 +254,3 @@
		ldr		w\nxtmp, [\xpfpsr, #4]
		msr		fpcr, x\nxtmp
.endm

.macro sve_load nxbase, xpfpsr, restore_ffr, xvqminus1, nxtmp, xtmp2
		sve_load_vq	\xvqminus1, x\nxtmp, \xtmp2
		__sve_load	\nxbase, \xpfpsr, \restore_ffr, \nxtmp
.endm
+1 −2
Original line number Diff line number Diff line
@@ -51,10 +51,9 @@ SYM_FUNC_END(sve_save_state)
 * x0 - pointer to buffer for state
 * x1 - pointer to storage for FPSR
 * x2 - Restore FFR if non-zero
 * x3 - VQ-1
 */
SYM_FUNC_START(sve_load_state)
	sve_load 0, x1, x2, x3, 4, x5
	sve_load 0, x1, x2, 4
	ret
SYM_FUNC_END(sve_load_state)

+7 −6
Original line number Diff line number Diff line
@@ -318,13 +318,14 @@ static void task_fpsimd_load(void)
	WARN_ON(!system_supports_fpsimd());
	WARN_ON(!have_cpu_fpsimd_context());

	if (IS_ENABLED(CONFIG_ARM64_SVE) && test_thread_flag(TIF_SVE))
	if (IS_ENABLED(CONFIG_ARM64_SVE) && test_thread_flag(TIF_SVE)) {
		sve_set_vq(sve_vq_from_vl(task_get_sve_vl(current)) - 1);
		sve_load_state(sve_pffr(&current->thread),
			       &current->thread.uw.fpsimd_state.fpsr, true,
			       sve_vq_from_vl(task_get_sve_vl(current)) - 1);
	else
			       &current->thread.uw.fpsimd_state.fpsr, true);
	} else {
		fpsimd_load_state(&current->thread.uw.fpsimd_state);
	}
}

/*
 * Ensure FPSIMD/SVE storage in memory for the loaded context is up to
@@ -1423,10 +1424,10 @@ void __efi_fpsimd_end(void)
		    likely(__this_cpu_read(efi_sve_state_used))) {
			char const *sve_state = this_cpu_ptr(efi_sve_state);

			sve_set_vq(sve_vq_from_vl(sve_get_vl()) - 1);
			sve_load_state(sve_state + sve_ffr_offset(sve_max_vl()),
				       &this_cpu_ptr(&efi_fpsimd_state)->fpsr,
				       true,
				       sve_vq_from_vl(sve_get_vl()) - 1);
				       true);

			__this_cpu_write(efi_sve_state_used, false);
		} else {
+1 −1
Original line number Diff line number Diff line
@@ -22,7 +22,7 @@ SYM_FUNC_END(__fpsimd_restore_state)

SYM_FUNC_START(__sve_restore_state)
	mov	x2, #1
	__sve_load 0, x1, x2, 3
	sve_load 0, x1, x2, 3
	ret
SYM_FUNC_END(__sve_restore_state)