Commit 76249ddc authored by Christophe Leroy's avatar Christophe Leroy Committed by Michael Ellerman
Browse files

powerpc/32: On syscall entry, enable instruction translation at the same time as data



On 40x and 8xx, kernel text is pinned.
On book3s/32, kernel text is mapped by BATs.

Enable instruction translation at the same time as data translation; this
makes things simpler.

MSR_RI can also be set at the same time because srr0/srr1 are already
saved and r1 is set properly.

On booke, translation is always on, so at the end all PPC32
have translation on early.

This reduces the null_syscall benchmark by 13 cycles on 8xx
(296 ==> 283 cycles).

Signed-off-by: Christophe Leroy <christophe.leroy@csgroup.eu>
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
Link: https://lore.kernel.org/r/3fe8891c814103a3549efc1d4e7ffc828bba5993.1612796617.git.christophe.leroy@csgroup.eu
parent eca24110
Loading
Loading
Loading
Loading
+9 −17
Original line number Diff line number Diff line
@@ -125,9 +125,13 @@
	lwz	r1,TASK_STACK-THREAD(r12)
	beq-	99f
	addi	r1, r1, THREAD_SIZE - INT_FRAME_SIZE
	LOAD_REG_IMMEDIATE(r10, MSR_KERNEL & ~(MSR_IR | MSR_RI)) /* can take DTLB miss */
	mtmsr	r10
	isync
	LOAD_REG_IMMEDIATE(r10, MSR_KERNEL)		/* can take exceptions */
	mtspr	SPRN_SRR1, r10
	lis	r10, 1f@h
	ori	r10, r10, 1f@l
	mtspr	SPRN_SRR0, r10
	rfi
1:
	tovirt(r12, r12)
	stw	r11,GPR1(r1)
	stw	r11,0(r1)
@@ -141,9 +145,6 @@
	stw	r10,_CCR(r11)		/* save registers */
#ifdef CONFIG_40x
	rlwinm	r9,r9,0,14,12		/* clear MSR_WE (necessary?) */
#else
	LOAD_REG_IMMEDIATE(r10, MSR_KERNEL & ~MSR_IR) /* can take exceptions */
	mtmsr	r10			/* (except for mach check in rtas) */
#endif
	lis	r10,STACK_FRAME_REGS_MARKER@ha /* exception frame marker */
	stw	r2,GPR2(r11)
@@ -180,8 +181,6 @@
#endif

3:
	lis	r11, transfer_to_syscall@h
	ori	r11, r11, transfer_to_syscall@l
#ifdef CONFIG_TRACE_IRQFLAGS
	/*
	 * If MSR is changing we need to keep interrupts disabled at this point
@@ -193,15 +192,8 @@
#else
	LOAD_REG_IMMEDIATE(r10, MSR_KERNEL | MSR_EE)
#endif
#if defined(CONFIG_PPC_8xx) && defined(CONFIG_PERF_EVENTS)
	mtspr	SPRN_NRI, r0
#endif
	mtspr	SPRN_SRR1,r10
	mtspr	SPRN_SRR0,r11
	rfi				/* jump to handler, enable MMU */
#ifdef CONFIG_40x
	b .	/* Prevent prefetch past rfi */
#endif
	mtmsr	r10
	b	transfer_to_syscall		/* jump to handler */
99:	b	ret_from_kernel_syscall
.endm

+2 −5
Original line number Diff line number Diff line
@@ -157,8 +157,6 @@ ALT_FTR_SECTION_END_IFSET(CPU_FTR_EMB_HV)
	stw	r12,4(r11)

3:
	lis	r11, transfer_to_syscall@h
	ori	r11, r11, transfer_to_syscall@l
#ifdef CONFIG_TRACE_IRQFLAGS
	/*
	 * If MSR is changing we need to keep interrupts disabled at this point
@@ -172,9 +170,8 @@ ALT_FTR_SECTION_END_IFSET(CPU_FTR_EMB_HV)
	lis	r10, (MSR_KERNEL | MSR_EE)@h
	ori	r10, r10, (MSR_KERNEL | MSR_EE)@l
#endif
	mtspr	SPRN_SRR1,r10
	mtspr	SPRN_SRR0,r11
	rfi				/* jump to handler, enable MMU */
	mtmsr	r10
	b	transfer_to_syscall	/* jump to handler */
99:	b	ret_from_kernel_syscall
.endm