Commit f23699c9 authored by Nicholas Piggin, committed by Michael Ellerman
Browse files

powerpc/64: allow alternate return locations for soft-masked interrupts



The exception table fixup adjusts the interrupt return location of a failed
page fault — when the fault was taken at an address listed in the exception
table — to the corresponding fixup handler address.

Introduce a variation of that idea which adds a fixup table for NMIs and
soft-masked asynchronous interrupts. This will be used to protect
certain critical sections that are sensitive to being clobbered by
interrupts coming in (due to using the same SPRs and/or irq soft-mask
state).

Signed-off-by: Nicholas Piggin <npiggin@gmail.com>
Signed-off-by: Michael Ellerman <mpe@ellerman.id.au>
Link: https://lore.kernel.org/r/20210617155116.2167984-10-npiggin@gmail.com
parent 63e40806
Loading
Loading
Loading
Loading
+13 −0
Original line number Diff line number Diff line
@@ -73,6 +73,11 @@
#include <asm/kprobes.h>
#include <asm/runlatch.h>

#ifdef CONFIG_PPC64
/* End marker of the soft-masked entry text region (defined elsewhere). */
extern char __end_soft_masked[];
/*
 * Look up addr in the kernel restart table; returns the fixup (restart)
 * address for the covering entry, or 0 if addr is not in any entry.
 */
unsigned long search_kernel_restart_table(unsigned long addr);
#endif

#ifdef CONFIG_PPC_BOOK3S_64
static inline void srr_regs_clobbered(void)
{
@@ -269,6 +274,14 @@ static inline void interrupt_nmi_exit_prepare(struct pt_regs *regs, struct inter
	 * new work to do (must use irq_work for that).
	 */

#ifdef CONFIG_PPC64
	if (arch_irq_disabled_regs(regs)) {
		unsigned long rst = search_kernel_restart_table(regs->nip);
		if (rst)
			regs_set_return_ip(regs, rst);
	}
#endif

#ifdef CONFIG_PPC64
	if (nmi_disables_ftrace(regs))
		this_cpu_set_ftrace_enabled(state->ftrace_enabled);
+8 −0
Original line number Diff line number Diff line
@@ -762,6 +762,14 @@ END_FTR_SECTION_NESTED(CPU_FTR_CELL_TB_BUG, CPU_FTR_CELL_TB_BUG, 96)
	stringify_in_c(.long (_target) - . ;)	\
	stringify_in_c(.previous)

/*
 * Emit one restart table entry: a (start, end, target) triple of 64-bit
 * values in the __restart_table section.  The SEARCH_RESTART_TABLE asm
 * macros and search_kernel_restart_table() scan these entries to redirect
 * an interrupt return address that falls in [start, end) to target.
 */
#define RESTART_TABLE(_start, _end, _target)	\
	stringify_in_c(.section __restart_table,"a";)\
	stringify_in_c(.balign 8;)		\
	stringify_in_c(.llong (_start);)	\
	stringify_in_c(.llong (_end);)		\
	stringify_in_c(.llong (_target);)	\
	stringify_in_c(.previous)

#ifdef CONFIG_PPC_FSL_BOOK3E
#define BTB_FLUSH(reg)			\
	lis reg,BUCSR_INIT@h;		\
+35 −2
Original line number Diff line number Diff line
@@ -901,6 +901,28 @@ kernel_dbg_exc:
	bl	unknown_exception
	b	interrupt_return

/*
 * Linear search of the restart table for an entry whose [start, end)
 * range covers the address in r10.  On exit, r11 holds the entry's
 * fixup target, or 0 if no entry matched.  r14/r15 are used as scratch
 * (table cursor and table end) — the caller must save/restore them.
 */
.macro SEARCH_RESTART_TABLE
	LOAD_REG_IMMEDIATE_SYM(r14, r11, __start___restart_table)
	LOAD_REG_IMMEDIATE_SYM(r15, r11, __stop___restart_table)
300:
	cmpd	r14,r15			/* reached end of table? */
	beq	302f
	ld	r11,0(r14)		/* entry start address */
	cmpld	r10,r11
	blt	301f			/* r10 below this entry's range */
	ld	r11,8(r14)		/* entry end address */
	cmpld	r10,r11
	bge	301f			/* r10 at/above end: no match here */
	ld	r11,16(r14)		/* match: load fixup target */
	b	303f
301:
	addi	r14,r14,24		/* advance to next 3x8-byte entry */
	b	300b
302:
	li	r11,0			/* no matching entry */
303:
.endm

/*
 * An interrupt came in while soft-disabled; We mark paca->irq_happened
 * accordingly and if the interrupt is level sensitive, we hard disable
@@ -909,6 +931,9 @@ kernel_dbg_exc:
 */

.macro masked_interrupt_book3e paca_irq full_mask
	std	r14,PACA_EXGEN+EX_R14(r13)
	std	r15,PACA_EXGEN+EX_R15(r13)

	lbz	r10,PACAIRQHAPPENED(r13)
	.if \full_mask == 1
	ori	r10,r10,\paca_irq | PACA_IRQ_HARD_DIS
@@ -918,15 +943,23 @@ kernel_dbg_exc:
	stb	r10,PACAIRQHAPPENED(r13)

	.if \full_mask == 1
	rldicl	r10,r11,48,1		/* clear MSR_EE */
	rotldi	r11,r10,16
	xori	r11,r11,MSR_EE		/* clear MSR_EE */
	mtspr	SPRN_SRR1,r11
	.endif

	mfspr	r10,SPRN_SRR0
	SEARCH_RESTART_TABLE
	cmpdi	r11,0
	beq	1f
	mtspr	SPRN_SRR0,r11		/* return to restart address */
1:

	lwz	r11,PACA_EXGEN+EX_CR(r13)
	mtcr	r11
	ld	r10,PACA_EXGEN+EX_R10(r13)
	ld	r11,PACA_EXGEN+EX_R11(r13)
	ld	r14,PACA_EXGEN+EX_R14(r13)
	ld	r15,PACA_EXGEN+EX_R15(r13)
	mfspr	r13,SPRN_SPRG_GEN_SCRATCH
	rfi
	b	.
+41 −0
Original line number Diff line number Diff line
@@ -591,6 +591,36 @@ END_FTR_SECTION_IFSET(CPU_FTR_CFAR)
	__GEN_COMMON_BODY \name
.endm

/*
 * Linear search of the restart table for an entry whose [start, end)
 * range covers the address in r11.  On exit, r12 holds the entry's
 * fixup target, or 0 if no entry matched.  r9/r10 are used as scratch
 * (table cursor and table end); r2 is preserved across the
 * CONFIG_RELOCATABLE TOC-relative loads.
 */
.macro SEARCH_RESTART_TABLE
#ifdef CONFIG_RELOCATABLE
	mr	r12,r2			/* save r2: LOAD_REG_ADDR needs the kernel TOC */
	ld	r2,PACATOC(r13)
	LOAD_REG_ADDR(r9, __start___restart_table)
	LOAD_REG_ADDR(r10, __stop___restart_table)
	mr	r2,r12			/* restore caller's r2 */
#else
	LOAD_REG_IMMEDIATE_SYM(r9, r12, __start___restart_table)
	LOAD_REG_IMMEDIATE_SYM(r10, r12, __stop___restart_table)
#endif
300:
	cmpd	r9,r10			/* reached end of table? */
	beq	302f
	ld	r12,0(r9)		/* entry start address */
	cmpld	r11,r12
	blt	301f			/* r11 below this entry's range */
	ld	r12,8(r9)		/* entry end address */
	cmpld	r11,r12
	bge	301f			/* r11 at/above end: no match here */
	ld	r12,16(r9)		/* match: load fixup target */
	b	303f
301:
	addi	r9,r9,24		/* advance to next 3x8-byte entry */
	b	300b
302:
	li	r12,0			/* no matching entry */
303:
.endm

/*
 * Restore all registers including H/SRR0/1 saved in a stack frame of a
 * standard exception.
@@ -2646,6 +2676,7 @@ EXC_COMMON_BEGIN(soft_nmi_common)
	mtmsrd	r9,1

	kuap_kernel_restore r9, r10

	EXCEPTION_RESTORE_REGS hsrr=0
	RFI_TO_KERNEL

@@ -2703,6 +2734,16 @@ masked_interrupt:
	stb	r9,PACASRR_VALID(r13)
	.endif

	SEARCH_RESTART_TABLE
	cmpdi	r12,0
	beq	3f
	.if \hsrr
	mtspr	SPRN_HSRR0,r12
	.else
	mtspr	SPRN_SRR0,r12
	.endif
3:

	ld	r9,PACA_EXGEN+EX_CTR(r13)
	mtctr	r9
	lwz	r9,PACA_EXGEN+EX_CCR(r13)
+10 −0
Original line number Diff line number Diff line
@@ -9,6 +9,14 @@
#define EMITS_PT_NOTE
#define RO_EXCEPTION_TABLE_ALIGN	0

/*
 * Linker-script helper: collect all __restart_table entries (emitted by
 * the RESTART_TABLE asm macro) into a single section, bounded by the
 * __start___restart_table/__stop___restart_table symbols that the
 * table-search code iterates between.  KEEP() prevents --gc-sections
 * from discarding the entries.
 */
#define RESTART_TABLE(align)						\
	. = ALIGN(align);						\
	__restart_table : AT(ADDR(__restart_table) - LOAD_OFFSET) {	\
		__start___restart_table = .;				\
		KEEP(*(__restart_table))				\
		__stop___restart_table = .;				\
	}

#include <asm/page.h>
#include <asm-generic/vmlinux.lds.h>
#include <asm/cache.h>
@@ -124,6 +132,8 @@ SECTIONS
	RO_DATA(PAGE_SIZE)

#ifdef CONFIG_PPC64
	RESTART_TABLE(8)

	. = ALIGN(8);
	__stf_entry_barrier_fixup : AT(ADDR(__stf_entry_barrier_fixup) - LOAD_OFFSET) {
		__start___stf_entry_barrier_fixup = .;
Loading