Commit a09a6e23 authored by Peter Zijlstra, committed by Borislav Petkov
Browse files

objtool: Add entry UNRET validation



Since entry asm is tricky, add a validation pass that ensures the
retbleed mitigation has been done before the first actual RET
instruction.

Entry points are those that either have UNWIND_HINT_ENTRY, which acts
as UNWIND_HINT_EMPTY but marks the instruction as an entry point, or
those that have UNWIND_HINT_IRET_REGS at +0.

This is basically a variant of validate_branch() that is
intra-function and it will simply follow all branches from marked
entry points and ensures that all paths lead to ANNOTATE_UNRET_END.

If a path hits RET or an indirection the path is a fail and will be
reported.

There are 3 ANNOTATE_UNRET_END instances:

 - UNTRAIN_RET itself
 - exception from-kernel; this path doesn't need UNTRAIN_RET
 - all early exceptions; these also don't need UNTRAIN_RET

Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Signed-off-by: Borislav Petkov <bp@suse.de>
Reviewed-by: Josh Poimboeuf <jpoimboe@kernel.org>
Signed-off-by: Borislav Petkov <bp@suse.de>
parent 0fe4aeea
Loading
Loading
Loading
Loading
+2 −1
Original line number Diff line number Diff line
@@ -85,7 +85,7 @@
 */

SYM_CODE_START(entry_SYSCALL_64)
	UNWIND_HINT_EMPTY
	UNWIND_HINT_ENTRY
	ENDBR

	swapgs
@@ -1095,6 +1095,7 @@ SYM_CODE_START_LOCAL(error_entry)
.Lerror_entry_done_lfence:
	FENCE_SWAPGS_KERNEL_ENTRY
	leaq	8(%rsp), %rax			/* return pt_regs pointer */
	ANNOTATE_UNRET_END
	RET

.Lbstep_iret:
+3 −3
Original line number Diff line number Diff line
@@ -49,7 +49,7 @@
 * 0(%ebp) arg6
 */
SYM_CODE_START(entry_SYSENTER_compat)
	UNWIND_HINT_EMPTY
	UNWIND_HINT_ENTRY
	ENDBR
	/* Interrupts are off on entry. */
	swapgs
@@ -179,7 +179,7 @@ SYM_CODE_END(entry_SYSENTER_compat)
 * 0(%esp) arg6
 */
SYM_CODE_START(entry_SYSCALL_compat)
	UNWIND_HINT_EMPTY
	UNWIND_HINT_ENTRY
	ENDBR
	/* Interrupts are off on entry. */
	swapgs
@@ -305,7 +305,7 @@ SYM_CODE_END(entry_SYSCALL_compat)
 * ebp  arg6
 */
SYM_CODE_START(entry_INT80_compat)
	UNWIND_HINT_EMPTY
	UNWIND_HINT_ENTRY
	ENDBR
	/*
	 * Interrupts are off on entry.
+12 −0
Original line number Diff line number Diff line
@@ -81,6 +81,17 @@
 */
#define ANNOTATE_UNRET_SAFE ANNOTATE_RETPOLINE_SAFE

/*
 * Abuse ANNOTATE_RETPOLINE_SAFE on a NOP to indicate UNRET_END, should
 * eventually turn into its own annotation.
 *
 * Per the commit message, this marks the point by which the retbleed
 * (RET-untraining) requirement is satisfied: objtool's entry validation
 * follows all paths from entry points and requires each path to reach
 * an ANNOTATE_UNRET_END before the first RET.  The NOP exists only as a
 * carrier instruction for the annotation to attach to.  Emitted solely
 * under CONFIG_DEBUG_ENTRY, i.e. when the validation pass is in use;
 * the macro expands to nothing otherwise.
 */
.macro ANNOTATE_UNRET_END
#ifdef CONFIG_DEBUG_ENTRY
	ANNOTATE_RETPOLINE_SAFE
	nop
#endif
.endm

/*
 * JMP_NOSPEC and CALL_NOSPEC macros can be used instead of a simple
 * indirect jmp/call which may be susceptible to the Spectre variant 2
@@ -131,6 +142,7 @@
 */
.macro UNTRAIN_RET
#ifdef CONFIG_RETPOLINE
	ANNOTATE_UNRET_END
	ALTERNATIVE_2 "",						\
	              "call zen_untrain_ret", X86_FEATURE_UNRET,	\
		      "call entry_ibpb", X86_FEATURE_ENTRY_IBPB
+4 −0
Original line number Diff line number Diff line
@@ -11,6 +11,10 @@
	UNWIND_HINT sp_reg=ORC_REG_UNDEFINED type=UNWIND_HINT_TYPE_CALL end=1
.endm

/*
 * Same ORC state as UNWIND_HINT_EMPTY (sp undefined, end=1) but with
 * type ENTRY instead of CALL, so objtool additionally treats the
 * annotated instruction as an entry point for its UNRET validation
 * pass (see the commit description above).
 */
.macro UNWIND_HINT_ENTRY
	UNWIND_HINT sp_reg=ORC_REG_UNDEFINED type=UNWIND_HINT_TYPE_ENTRY end=1
.endm

.macro UNWIND_HINT_REGS base=%rsp offset=0 indirect=0 extra=1 partial=0
	.if \base == %rsp
		.if \indirect
+5 −0
Original line number Diff line number Diff line
@@ -389,6 +389,8 @@ SYM_CODE_START_NOALIGN(vc_boot_ghcb)
	UNWIND_HINT_IRET_REGS offset=8
	ENDBR

	ANNOTATE_UNRET_END

	/* Build pt_regs */
	PUSH_AND_CLEAR_REGS

@@ -448,6 +450,7 @@ SYM_CODE_END(early_idt_handler_array)

SYM_CODE_START_LOCAL(early_idt_handler_common)
	UNWIND_HINT_IRET_REGS offset=16
	ANNOTATE_UNRET_END
	/*
	 * The stack is the hardware frame, an error code or zero, and the
	 * vector number.
@@ -497,6 +500,8 @@ SYM_CODE_START_NOALIGN(vc_no_ghcb)
	UNWIND_HINT_IRET_REGS offset=8
	ENDBR

	ANNOTATE_UNRET_END

	/* Build pt_regs */
	PUSH_AND_CLEAR_REGS

Loading