Commit 45c23bf4 authored by Masami Hiramatsu's avatar Masami Hiramatsu Committed by Alexei Starovoitov
Browse files

x86,kprobes: Fix optprobe trampoline to generate complete pt_regs



Currently the optprobe trampoline template code generates an
almost complete pt_regs on-stack, everything except regs->ss.
The 'regs->ss' points to the top of stack, which is not a
valid segment descriptor.

Just as rethook does, complete the job by also pushing ss.

Suggested-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Signed-off-by: Masami Hiramatsu <mhiramat@kernel.org>
Signed-off-by: Alexei Starovoitov <ast@kernel.org>
Link: https://lore.kernel.org/bpf/164826166027.2455864.14759128090648961900.stgit@devnote2
parent 0ef6f5c0
Loading
Loading
Loading
Loading
+16 −9
Original line number Diff line number Diff line
@@ -106,7 +106,8 @@ asm (
			".global optprobe_template_entry\n"
			"optprobe_template_entry:\n"
#ifdef CONFIG_X86_64
			/* We don't bother saving the ss register */
			"       pushq $" __stringify(__KERNEL_DS) "\n"
			/* Save the 'sp - 8', this will be fixed later. */
			"	pushq %rsp\n"
			"	pushfq\n"
			".global optprobe_template_clac\n"
@@ -121,14 +122,17 @@ asm (
			".global optprobe_template_call\n"
			"optprobe_template_call:\n"
			ASM_NOP5
			/* Move flags to rsp */
			/* Copy 'regs->flags' into 'regs->ss'. */
			"	movq 18*8(%rsp), %rdx\n"
			"	movq %rdx, 19*8(%rsp)\n"
			"	movq %rdx, 20*8(%rsp)\n"
			RESTORE_REGS_STRING
			/* Skip flags entry */
			"	addq $8, %rsp\n"
			/* Skip 'regs->flags' and 'regs->sp'. */
			"	addq $16, %rsp\n"
			/* And pop flags register from 'regs->ss'. */
			"	popfq\n"
#else /* CONFIG_X86_32 */
			"	pushl %ss\n"
			/* Save the 'sp - 4', this will be fixed later. */
			"	pushl %esp\n"
			"	pushfl\n"
			".global optprobe_template_clac\n"
@@ -142,12 +146,13 @@ asm (
			".global optprobe_template_call\n"
			"optprobe_template_call:\n"
			ASM_NOP5
			/* Move flags into esp */
			/* Copy 'regs->flags' into 'regs->ss'. */
			"	movl 14*4(%esp), %edx\n"
			"	movl %edx, 15*4(%esp)\n"
			"	movl %edx, 16*4(%esp)\n"
			RESTORE_REGS_STRING
			/* Skip flags entry */
			"	addl $4, %esp\n"
			/* Skip 'regs->flags' and 'regs->sp'. */
			"	addl $8, %esp\n"
			/* And pop flags register from 'regs->ss'. */
			"	popfl\n"
#endif
			".global optprobe_template_end\n"
@@ -179,6 +184,8 @@ optimized_callback(struct optimized_kprobe *op, struct pt_regs *regs)
		kprobes_inc_nmissed_count(&op->kp);
	} else {
		struct kprobe_ctlblk *kcb = get_kprobe_ctlblk();
		/* Adjust stack pointer */
		regs->sp += sizeof(long);
		/* Save skipped registers */
		regs->cs = __KERNEL_CS;
#ifdef CONFIG_X86_32