Commit 33634e42 authored by Juergen Gross, committed by Borislav Petkov

x86/paravirt: Remove no longer needed 32-bit pvops cruft



PVOP_VCALL4() is only used for Xen PV, while PVOP_CALL4() isn't used
at all. Keep PVOP_CALL4() on 64-bit purely for symmetry.
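
As an illustration (hypothetical op name, not taken from this patch), a
pvops wrapper built on the four-argument variant looks roughly like:

  static inline void pv_example_op(unsigned long a, unsigned long b,
                                   unsigned long c, unsigned long d)
  {
          PVOP_VCALL4(mmu.example_op, a, b, c, d);
  }

On 64-bit all four arguments are passed in registers via
PVOP_CALL_ARG1..4; only the 32-bit variant removed below had to push
the fourth argument on the stack.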

This allows removing the 32-bit definitions of those macros, leading
to a substantial simplification of the paravirt macros, as they were
the only ones needing non-empty "pre" and "post" parameters.
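
The only non-empty users were the 32-bit four-argument calls, where
"pre" and "post" bracketed the patched indirect call to pass the fourth
argument on the stack, roughly:

  "push %[_arg4];"              /* pre:  4th argument goes on the stack */
  paravirt_alt(PARAVIRT_CALL)   /* patched indirect call                */
  "lea 4(%%esp),%%esp;"         /* post: drop it again after the call   */

With those callers gone, the plain call sequence is all that is needed.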

PVOP_CALLEE2() and PVOP_VCALLEE2() are used nowhere, so remove them.

The special handling of return types larger than unsigned long is no
longer needed either. Replace it with a BUILD_BUG_ON().
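
A hypothetical example of what the check now catches: on 32-bit, where
unsigned long is 32 bits wide, a call like

  u64 val = PVOP_CALL0(u64, cpu.example_read);  /* hypothetical op */

would now fail the BUILD_BUG_ON() at compile time instead of silently
taking the old edx:eax merge path.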

DISABLE_INTERRUPTS() is used in 32-bit code only, so it can just be
replaced by cli.

INTERRUPT_RETURN in 32-bit code can be replaced by iret.

ENABLE_INTERRUPTS() is used nowhere, so it can be removed.
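
Outside of CONFIG_PARAVIRT_XXL (which is 64-bit only) these macros were
plain aliases for the native instructions anyway, as the definitions
removed from irqflags.h below show:

  #define ENABLE_INTERRUPTS(x)  sti
  #define DISABLE_INTERRUPTS(x) cli
  #define INTERRUPT_RETURN      iret  /* !CONFIG_X86_64 */

so open-coding cli and iret in the 32-bit entry code is a 1:1
replacement.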

Signed-off-by: Juergen Gross <jgross@suse.com>
Signed-off-by: Borislav Petkov <bp@suse.de>
Acked-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Link: https://lkml.kernel.org/r/20210311142319.4723-10-jgross@suse.com
parent 4e629211
+2 −2
@@ -430,7 +430,7 @@
	 * will soon execute iret and the tracer was already set to
	 * the irqstate after the IRET:
	 */
	DISABLE_INTERRUPTS(CLBR_ANY)
	cli
	lss	(%esp), %esp			/* switch to espfix segment */
.Lend_\@:
#endif /* CONFIG_X86_ESPFIX32 */
@@ -1077,7 +1077,7 @@ restore_all_switch_stack:
	 * when returning from IPI handler and when returning from
	 * scheduler to user-space.
	 */
	INTERRUPT_RETURN
	iret

.section .fixup, "ax"
SYM_CODE_START(asm_iret_error)
+0 −5
@@ -109,9 +109,6 @@ static __always_inline unsigned long arch_local_irq_save(void)
}
#else

#define ENABLE_INTERRUPTS(x)	sti
#define DISABLE_INTERRUPTS(x)	cli

#ifdef CONFIG_X86_64
#ifdef CONFIG_DEBUG_ENTRY
#define SAVE_FLAGS(x)		pushfq; popq %rax
@@ -119,8 +116,6 @@ static __always_inline unsigned long arch_local_irq_save(void)

#define INTERRUPT_RETURN	jmp native_iret

#else
#define INTERRUPT_RETURN		iret
#endif

#endif /* __ASSEMBLY__ */
+1 −34
@@ -719,6 +719,7 @@ extern void default_banner(void);
	.if ((~(set)) & mask); pop %reg; .endif

#ifdef CONFIG_X86_64
#ifdef CONFIG_PARAVIRT_XXL

#define PV_SAVE_REGS(set)			\
	COND_PUSH(set, CLBR_RAX, rax);		\
@@ -744,46 +745,12 @@ extern void default_banner(void);
#define PARA_PATCH(off)		((off) / 8)
#define PARA_SITE(ptype, ops)	_PVSITE(ptype, ops, .quad, 8)
#define PARA_INDIRECT(addr)	*addr(%rip)
#else
#define PV_SAVE_REGS(set)			\
	COND_PUSH(set, CLBR_EAX, eax);		\
	COND_PUSH(set, CLBR_EDI, edi);		\
	COND_PUSH(set, CLBR_ECX, ecx);		\
	COND_PUSH(set, CLBR_EDX, edx)
#define PV_RESTORE_REGS(set)			\
	COND_POP(set, CLBR_EDX, edx);		\
	COND_POP(set, CLBR_ECX, ecx);		\
	COND_POP(set, CLBR_EDI, edi);		\
	COND_POP(set, CLBR_EAX, eax)

#define PARA_PATCH(off)		((off) / 4)
#define PARA_SITE(ptype, ops)	_PVSITE(ptype, ops, .long, 4)
#define PARA_INDIRECT(addr)	*%cs:addr
#endif

#ifdef CONFIG_PARAVIRT_XXL
#define INTERRUPT_RETURN						\
	PARA_SITE(PARA_PATCH(PV_CPU_iret),				\
		  ANNOTATE_RETPOLINE_SAFE;				\
		  jmp PARA_INDIRECT(pv_ops+PV_CPU_iret);)

#define DISABLE_INTERRUPTS(clobbers)					\
	PARA_SITE(PARA_PATCH(PV_IRQ_irq_disable),			\
		  PV_SAVE_REGS(clobbers | CLBR_CALLEE_SAVE);		\
		  ANNOTATE_RETPOLINE_SAFE;				\
		  call PARA_INDIRECT(pv_ops+PV_IRQ_irq_disable);	\
		  PV_RESTORE_REGS(clobbers | CLBR_CALLEE_SAVE);)

#define ENABLE_INTERRUPTS(clobbers)					\
	PARA_SITE(PARA_PATCH(PV_IRQ_irq_enable),			\
		  PV_SAVE_REGS(clobbers | CLBR_CALLEE_SAVE);		\
		  ANNOTATE_RETPOLINE_SAFE;				\
		  call PARA_INDIRECT(pv_ops+PV_IRQ_irq_enable);		\
		  PV_RESTORE_REGS(clobbers | CLBR_CALLEE_SAVE);)
#endif

#ifdef CONFIG_X86_64
#ifdef CONFIG_PARAVIRT_XXL
#ifdef CONFIG_DEBUG_ENTRY
#define SAVE_FLAGS(clobbers)                                        \
	PARA_SITE(PARA_PATCH(PV_IRQ_save_fl),			    \
+32 −80
@@ -470,55 +470,34 @@ int paravirt_disable_iospace(void);
	})


#define ____PVOP_CALL(rettype, op, clbr, call_clbr, extra_clbr,		\
		      pre, post, ...)					\
#define ____PVOP_CALL(rettype, op, clbr, call_clbr, extra_clbr, ...)	\
	({								\
		rettype __ret;						\
		PVOP_CALL_ARGS;						\
		PVOP_TEST_NULL(op);					\
		/* This is 32-bit specific, but is okay in 64-bit */	\
		/* since this condition will never hold */		\
		if (sizeof(rettype) > sizeof(unsigned long)) {		\
			asm volatile(pre				\
				     paravirt_alt(PARAVIRT_CALL)	\
				     post				\
		BUILD_BUG_ON(sizeof(rettype) > sizeof(unsigned long));	\
		asm volatile(paravirt_alt(PARAVIRT_CALL)		\
			     : call_clbr, ASM_CALL_CONSTRAINT		\
			     : paravirt_type(op),			\
			       paravirt_clobber(clbr),			\
			       ##__VA_ARGS__				\
			     : "memory", "cc" extra_clbr);		\
			__ret = (rettype)((((u64)__edx) << 32) | __eax); \
		} else {						\
			asm volatile(pre				\
				     paravirt_alt(PARAVIRT_CALL)	\
				     post				\
				     : call_clbr, ASM_CALL_CONSTRAINT	\
				     : paravirt_type(op),		\
				       paravirt_clobber(clbr),		\
				       ##__VA_ARGS__			\
				     : "memory", "cc" extra_clbr);	\
			__ret = (rettype)(__eax & PVOP_RETMASK(rettype));	\
		}							\
		__ret;							\
		(rettype)(__eax & PVOP_RETMASK(rettype));		\
	})

#define __PVOP_CALL(rettype, op, pre, post, ...)			\
#define __PVOP_CALL(rettype, op, ...)					\
	____PVOP_CALL(rettype, op, CLBR_ANY, PVOP_CALL_CLOBBERS,	\
		      EXTRA_CLOBBERS, pre, post, ##__VA_ARGS__)
		      EXTRA_CLOBBERS, ##__VA_ARGS__)

#define __PVOP_CALLEESAVE(rettype, op, pre, post, ...)			\
#define __PVOP_CALLEESAVE(rettype, op, ...)				\
	____PVOP_CALL(rettype, op.func, CLBR_RET_REG,			\
		      PVOP_CALLEE_CLOBBERS, ,				\
		      pre, post, ##__VA_ARGS__)
		      PVOP_CALLEE_CLOBBERS, , ##__VA_ARGS__)


#define ____PVOP_VCALL(op, clbr, call_clbr, extra_clbr, pre, post, ...)	\
#define ____PVOP_VCALL(op, clbr, call_clbr, extra_clbr, ...)		\
	({								\
		PVOP_VCALL_ARGS;					\
		PVOP_TEST_NULL(op);					\
		asm volatile(pre					\
			     paravirt_alt(PARAVIRT_CALL)		\
			     post					\
		asm volatile(paravirt_alt(PARAVIRT_CALL)		\
			     : call_clbr, ASM_CALL_CONSTRAINT		\
			     : paravirt_type(op),			\
			       paravirt_clobber(clbr),			\
@@ -526,84 +505,57 @@ int paravirt_disable_iospace(void);
			     : "memory", "cc" extra_clbr);		\
	})

#define __PVOP_VCALL(op, pre, post, ...)				\
#define __PVOP_VCALL(op, ...)						\
	____PVOP_VCALL(op, CLBR_ANY, PVOP_VCALL_CLOBBERS,		\
		       VEXTRA_CLOBBERS,					\
		       pre, post, ##__VA_ARGS__)
		       VEXTRA_CLOBBERS, ##__VA_ARGS__)

#define __PVOP_VCALLEESAVE(op, pre, post, ...)				\
#define __PVOP_VCALLEESAVE(op, ...)					\
	____PVOP_VCALL(op.func, CLBR_RET_REG,				\
		      PVOP_VCALLEE_CLOBBERS, ,				\
		      pre, post, ##__VA_ARGS__)
		      PVOP_VCALLEE_CLOBBERS, , ##__VA_ARGS__)



#define PVOP_CALL0(rettype, op)						\
	__PVOP_CALL(rettype, op, "", "")
	__PVOP_CALL(rettype, op)
#define PVOP_VCALL0(op)							\
	__PVOP_VCALL(op, "", "")
	__PVOP_VCALL(op)

#define PVOP_CALLEE0(rettype, op)					\
	__PVOP_CALLEESAVE(rettype, op, "", "")
	__PVOP_CALLEESAVE(rettype, op)
#define PVOP_VCALLEE0(op)						\
	__PVOP_VCALLEESAVE(op, "", "")
	__PVOP_VCALLEESAVE(op)


#define PVOP_CALL1(rettype, op, arg1)					\
	__PVOP_CALL(rettype, op, "", "", PVOP_CALL_ARG1(arg1))
	__PVOP_CALL(rettype, op, PVOP_CALL_ARG1(arg1))
#define PVOP_VCALL1(op, arg1)						\
	__PVOP_VCALL(op, "", "", PVOP_CALL_ARG1(arg1))
	__PVOP_VCALL(op, PVOP_CALL_ARG1(arg1))

#define PVOP_CALLEE1(rettype, op, arg1)					\
	__PVOP_CALLEESAVE(rettype, op, "", "", PVOP_CALL_ARG1(arg1))
	__PVOP_CALLEESAVE(rettype, op, PVOP_CALL_ARG1(arg1))
#define PVOP_VCALLEE1(op, arg1)						\
	__PVOP_VCALLEESAVE(op, "", "", PVOP_CALL_ARG1(arg1))
	__PVOP_VCALLEESAVE(op, PVOP_CALL_ARG1(arg1))


#define PVOP_CALL2(rettype, op, arg1, arg2)				\
	__PVOP_CALL(rettype, op, "", "", PVOP_CALL_ARG1(arg1),		\
		    PVOP_CALL_ARG2(arg2))
	__PVOP_CALL(rettype, op, PVOP_CALL_ARG1(arg1), PVOP_CALL_ARG2(arg2))
#define PVOP_VCALL2(op, arg1, arg2)					\
	__PVOP_VCALL(op, "", "", PVOP_CALL_ARG1(arg1),			\
		     PVOP_CALL_ARG2(arg2))

#define PVOP_CALLEE2(rettype, op, arg1, arg2)				\
	__PVOP_CALLEESAVE(rettype, op, "", "", PVOP_CALL_ARG1(arg1),	\
			  PVOP_CALL_ARG2(arg2))
#define PVOP_VCALLEE2(op, arg1, arg2)					\
	__PVOP_VCALLEESAVE(op, "", "", PVOP_CALL_ARG1(arg1),		\
			   PVOP_CALL_ARG2(arg2))

	__PVOP_VCALL(op, PVOP_CALL_ARG1(arg1), PVOP_CALL_ARG2(arg2))

#define PVOP_CALL3(rettype, op, arg1, arg2, arg3)			\
	__PVOP_CALL(rettype, op, "", "", PVOP_CALL_ARG1(arg1),		\
	__PVOP_CALL(rettype, op, PVOP_CALL_ARG1(arg1),			\
		    PVOP_CALL_ARG2(arg2), PVOP_CALL_ARG3(arg3))
#define PVOP_VCALL3(op, arg1, arg2, arg3)				\
	__PVOP_VCALL(op, "", "", PVOP_CALL_ARG1(arg1),			\
	__PVOP_VCALL(op, PVOP_CALL_ARG1(arg1),				\
		     PVOP_CALL_ARG2(arg2), PVOP_CALL_ARG3(arg3))

/* This is the only difference in x86_64. We can make it much simpler */
#ifdef CONFIG_X86_32
#define PVOP_CALL4(rettype, op, arg1, arg2, arg3, arg4)			\
	__PVOP_CALL(rettype, op,					\
		    "push %[_arg4];", "lea 4(%%esp),%%esp;",		\
		    PVOP_CALL_ARG1(arg1), PVOP_CALL_ARG2(arg2),		\
		    PVOP_CALL_ARG3(arg3), [_arg4] "mr" ((u32)(arg4)))
#define PVOP_VCALL4(op, arg1, arg2, arg3, arg4)				\
	__PVOP_VCALL(op,						\
		    "push %[_arg4];", "lea 4(%%esp),%%esp;",		\
		    "0" ((u32)(arg1)), "1" ((u32)(arg2)),		\
		    "2" ((u32)(arg3)), [_arg4] "mr" ((u32)(arg4)))
#else
#define PVOP_CALL4(rettype, op, arg1, arg2, arg3, arg4)			\
	__PVOP_CALL(rettype, op, "", "",				\
		    PVOP_CALL_ARG1(arg1), PVOP_CALL_ARG2(arg2),		\
		    PVOP_CALL_ARG3(arg3), PVOP_CALL_ARG4(arg4))
#define PVOP_VCALL4(op, arg1, arg2, arg3, arg4)				\
	__PVOP_VCALL(op, "", "",					\
		     PVOP_CALL_ARG1(arg1), PVOP_CALL_ARG2(arg2),	\
	__PVOP_VCALL(op, PVOP_CALL_ARG1(arg1), PVOP_CALL_ARG2(arg2),	\
		     PVOP_CALL_ARG3(arg3), PVOP_CALL_ARG4(arg4))
#endif

/* Lazy mode for batching updates / context switch */
enum paravirt_lazy_mode {
+0 −2
@@ -63,8 +63,6 @@ static void __used common(void)

#ifdef CONFIG_PARAVIRT_XXL
	BLANK();
	OFFSET(PV_IRQ_irq_disable, paravirt_patch_template, irq.irq_disable);
	OFFSET(PV_IRQ_irq_enable, paravirt_patch_template, irq.irq_enable);
	OFFSET(PV_CPU_iret, paravirt_patch_template, cpu.iret);
#endif