Commit f1a033cc authored by Juergen Gross, committed by Peter Zijlstra

x86/paravirt: Use common macro for creating simple asm paravirt functions



There are some paravirt assembler functions which share a common
pattern. Introduce a macro DEFINE_PARAVIRT_ASM() for creating them.

Note that this macro includes explicit alignment of the generated
functions, so __raw_callee_save___kvm_vcpu_is_preempted(),
_paravirt_nop() and paravirt_ret0() are now aligned at 4-byte
boundaries.

The explicit _paravirt_nop() prototype in paravirt.c isn't needed, as
it is already declared in paravirt_types.h.

Signed-off-by: Juergen Gross <jgross@suse.com>
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Reviewed-by: Srivatsa S. Bhat (VMware) <srivatsa@csail.mit.edu>
Link: https://lkml.kernel.org/r/20221109134418.6516-1-jgross@suse.com
parent 5736b1b7
arch/x86/include/asm/paravirt.h  +12 −0
@@ -731,6 +731,18 @@ static __always_inline unsigned long arch_local_irq_save(void)
#undef PVOP_VCALL4
#undef PVOP_CALL4

+#define DEFINE_PARAVIRT_ASM(func, instr, sec)		\
+	asm (".pushsection " #sec ", \"ax\"\n"		\
+	     ".global " #func "\n\t"			\
+	     ".type " #func ", @function\n\t"		\
+	     ASM_FUNC_ALIGN "\n"			\
+	     #func ":\n\t"				\
+	     ASM_ENDBR					\
+	     instr "\n\t"				\
+	     ASM_RET					\
+	     ".size " #func ", . - " #func "\n\t"	\
+	     ".popsection")
+
extern void default_banner(void);

#else  /* __ASSEMBLY__ */
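
For illustration, expanding DEFINE_PARAVIRT_ASM(paravirt_ret0, "xor %eax,%eax", .entry.text) from the paravirt.c hunk below yields roughly the following assembly. This is a sketch: the exact expansions of ASM_FUNC_ALIGN, ASM_ENDBR and ASM_RET depend on the kernel configuration (e.g. ASM_ENDBR emits endbr64 only with CONFIG_X86_KERNEL_IBT, and ASM_RET may append int3 for the SLS mitigation).

.pushsection .entry.text, "ax"
.global paravirt_ret0
.type paravirt_ret0, @function
.balign 4			# ASM_FUNC_ALIGN (sketch; cf. the 4-byte alignment note above)
paravirt_ret0:
	endbr64			# ASM_ENDBR (configuration-dependent)
	xor %eax,%eax		# the instr argument
	ret			# ASM_RET (configuration-dependent)
.size paravirt_ret0, . - paravirt_ret0
.popsection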
arch/x86/include/asm/qspinlock_paravirt.h  +20 −27
@@ -14,8 +14,6 @@

__PV_CALLEE_SAVE_REGS_THUNK(__pv_queued_spin_unlock_slowpath, ".spinlock.text");
#define __pv_queued_spin_unlock	__pv_queued_spin_unlock
-#define PV_UNLOCK		"__raw_callee_save___pv_queued_spin_unlock"
-#define PV_UNLOCK_SLOWPATH	"__raw_callee_save___pv_queued_spin_unlock_slowpath"

/*
 * Optimized assembly version of __raw_callee_save___pv_queued_spin_unlock
@@ -37,32 +35,27 @@ __PV_CALLEE_SAVE_REGS_THUNK(__pv_queued_spin_unlock_slowpath, ".spinlock.text");
 *   rsi = lockval           (second argument)
 *   rdx = internal variable (set to 0)
 */
asm    (".pushsection .spinlock.text, \"ax\";"
	".globl " PV_UNLOCK ";"
	".type " PV_UNLOCK ", @function;"
	ASM_FUNC_ALIGN
	PV_UNLOCK ": "
	ASM_ENDBR
	FRAME_BEGIN
	"push  %rdx;"
	"mov   $0x1,%eax;"
	"xor   %edx,%edx;"
	LOCK_PREFIX "cmpxchg %dl,(%rdi);"
	"cmp   $0x1,%al;"
	"jne   .slowpath;"
	"pop   %rdx;"
+#define PV_UNLOCK_ASM							\
+	FRAME_BEGIN							\
+	"push  %rdx\n\t"						\
+	"mov   $0x1,%eax\n\t"						\
+	"xor   %edx,%edx\n\t"						\
+	LOCK_PREFIX "cmpxchg %dl,(%rdi)\n\t"				\
+	"cmp   $0x1,%al\n\t"						\
+	"jne   .slowpath\n\t"						\
+	"pop   %rdx\n\t"						\
+	FRAME_END							\
+	ASM_RET								\
+	".slowpath:\n\t"						\
+	"push   %rsi\n\t"						\
+	"movzbl %al,%esi\n\t"						\
+	"call __raw_callee_save___pv_queued_spin_unlock_slowpath\n\t"	\
+	"pop    %rsi\n\t"						\
+	"pop    %rdx\n\t"						\
+	FRAME_END
-	ASM_RET
-	".slowpath: "
-	"push   %rsi;"
-	"movzbl %al,%esi;"
-	"call " PV_UNLOCK_SLOWPATH ";"
-	"pop    %rsi;"
-	"pop    %rdx;"
-	FRAME_END
-	ASM_RET
-	".size " PV_UNLOCK ", .-" PV_UNLOCK ";"
-	".popsection");

+DEFINE_PARAVIRT_ASM(__raw_callee_save___pv_queued_spin_unlock,
+		    PV_UNLOCK_ASM, .spinlock.text);

#else /* CONFIG_64BIT */

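For context, the hand-optimized asm above implements the same logic as the generic C version of __pv_queued_spin_unlock; a sketch along the lines of kernel/locking/qspinlock_paravirt.h (not part of this diff):

void __pv_queued_spin_unlock(struct qspinlock *lock)
{
	/* try 1 -> 0: uncontended unlock, matching the lock cmpxchg in the asm */
	u8 lockval = cmpxchg_release(&lock->locked, _Q_LOCKED_VAL, 0);

	if (likely(lockval == _Q_LOCKED_VAL))
		return;

	/* a vCPU is waiting: take the slow path and kick it */
	__pv_queued_spin_unlock_slowpath(lock, lockval);
}
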
arch/x86/kernel/kvm.c  +6 −13
@@ -798,20 +798,13 @@ extern bool __raw_callee_save___kvm_vcpu_is_preempted(long);
 * Hand-optimize version for x86-64 to avoid 8 64-bit register saving and
 * restoring to/from the stack.
 */
-asm(
-".pushsection .text;"
-".global __raw_callee_save___kvm_vcpu_is_preempted;"
-".type __raw_callee_save___kvm_vcpu_is_preempted, @function;"
-ASM_FUNC_ALIGN
-"__raw_callee_save___kvm_vcpu_is_preempted:"
-ASM_ENDBR
-"movq	__per_cpu_offset(,%rdi,8), %rax;"
-"cmpb	$0, " __stringify(KVM_STEAL_TIME_preempted) "+steal_time(%rax);"
-"setne	%al;"
-ASM_RET
-".size __raw_callee_save___kvm_vcpu_is_preempted, .-__raw_callee_save___kvm_vcpu_is_preempted;"
-".popsection");
+#define PV_VCPU_PREEMPTED_ASM						     \
+ "movq   __per_cpu_offset(,%rdi,8), %rax\n\t"				     \
+ "cmpb   $0, " __stringify(KVM_STEAL_TIME_preempted) "+steal_time(%rax)\n\t" \
+ "setne  %al\n\t"

+DEFINE_PARAVIRT_ASM(__raw_callee_save___kvm_vcpu_is_preempted,
+		    PV_VCPU_PREEMPTED_ASM, .text);
#endif

static void __init kvm_guest_init(void)
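
For reference, the asm above mirrors the plain C helper kvm.c uses when the hand-optimized variant isn't built; a sketch of that surrounding code (not part of this diff):

__visible bool __kvm_vcpu_is_preempted(long cpu)
{
	struct kvm_steal_time *src = &per_cpu(steal_time, cpu);

	return !!(src->preempted & KVM_VCPU_PREEMPTED);
}

(The asm variant simply tests the preempted byte for non-zero via setne instead of masking.)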
arch/x86/kernel/paravirt.c  +2 −21
@@ -37,29 +37,10 @@
 * nop stub, which must not clobber anything *including the stack* to
 * avoid confusing the entry prologues.
 */
-extern void _paravirt_nop(void);
-asm (".pushsection .entry.text, \"ax\"\n"
-     ".global _paravirt_nop\n"
-     ASM_FUNC_ALIGN
-     "_paravirt_nop:\n\t"
-     ASM_ENDBR
-     ASM_RET
-     ".size _paravirt_nop, . - _paravirt_nop\n\t"
-     ".type _paravirt_nop, @function\n\t"
-     ".popsection");
+DEFINE_PARAVIRT_ASM(_paravirt_nop, "", .entry.text);

/* stub always returning 0. */
asm (".pushsection .entry.text, \"ax\"\n"
     ".global paravirt_ret0\n"
     ASM_FUNC_ALIGN
     "paravirt_ret0:\n\t"
     ASM_ENDBR
     "xor %" _ASM_AX ", %" _ASM_AX ";\n\t"
     ASM_RET
     ".size paravirt_ret0, . - paravirt_ret0\n\t"
     ".type paravirt_ret0, @function\n\t"
     ".popsection");

+DEFINE_PARAVIRT_ASM(paravirt_ret0, "xor %eax,%eax", .entry.text);

void __init default_banner(void)
{
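
With an empty instr string, DEFINE_PARAVIRT_ASM(_paravirt_nop, "", .entry.text) reduces to a stub that is nothing but a return; a sketch of the generated code (configuration details as in the expansion example above):

_paravirt_nop:
	endbr64		# ASM_ENDBR (only with CONFIG_X86_KERNEL_IBT)
	ret		# ASM_RET

Since it executes no other instructions, it clobbers neither registers nor the stack, as the comment above the stub requires.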