Commit 4e918ab1 authored by Ard Biesheuvel
Browse files

ARM: assembler: add optimized ldr/str macros to load variables from memory



We will be adding variable loads to various hot paths, so it makes sense
to add a helper macro that can load variables from asm code without the
use of literal pool entries. On v7 or later, we can simply use MOVW/MOVT
pairs, but on earlier cores, this requires a bit of hackery to emit an
instruction sequence that implements this using a sequence of ADD/LDR
instructions.

Acked-by: Linus Walleij <linus.walleij@linaro.org>
Acked-by: Nicolas Pitre <nico@fluxnic.net>
Signed-off-by: Ard Biesheuvel <ardb@kernel.org>
Tested-by: Marc Zyngier <maz@kernel.org>
Tested-by: Vladimir Murzin <vladimir.murzin@arm.com> # ARMv7M
parent 1fa8c4b1
Loading
Loading
Loading
Loading
+41 −4
Original line number Diff line number Diff line
@@ -568,12 +568,12 @@ THUMB( orr \reg , \reg , #PSR_T_BIT )
	/*
	 * mov_l - move a constant value or [relocated] address into a register
	 */
	.macro		mov_l, dst:req, imm:req
	.macro		mov_l, dst:req, imm:req, cond
	.if		__LINUX_ARM_ARCH__ < 7
	ldr		\dst, =\imm
	ldr\cond	\dst, =\imm
	.else
	movw		\dst, #:lower16:\imm
	movt		\dst, #:upper16:\imm
	movw\cond	\dst, #:lower16:\imm
	movt\cond	\dst, #:upper16:\imm
	.endif
	.endm

@@ -611,6 +611,43 @@ THUMB( orr \reg , \reg , #PSR_T_BIT )
	__adldst_l	str, \src, \sym, \tmp, \cond
	.endm

	/*
	 * __ldst_va - emit a single load or store to/from the virtual
	 * address of \sym, without using a literal pool entry
	 *
	 * op:	 'ldr' or 'str' mnemonic to emit
	 * reg:	 register to load into / store from
	 * tmp:	 scratch register used to form the address of \sym
	 * sym:	 symbol to access
	 * cond: optional condition code, applied to every emitted instruction
	 */
	.macro		__ldst_va, op, reg, tmp, sym, cond
#if __LINUX_ARM_ARCH__ >= 7 || \
    (defined(MODULE) && defined(CONFIG_ARM_MODULE_PLTS)) || \
    (defined(CONFIG_LD_IS_LLD) && CONFIG_LLD_VERSION < 140000)
	/* mov_l gives MOVW/MOVT on v7+, or a literal load on older cores */
	mov_l		\tmp, \sym, \cond
	\op\cond	\reg, [\tmp]
#else
	/*
	 * Avoid a literal load, by emitting a sequence of ADD/LDR instructions
	 * with the appropriate relocations. The combined sequence has a range
	 * of -/+ 256 MiB, which should be sufficient for the core kernel and
	 * for modules loaded into the module region.
	 */
	.globl		\sym
	.reloc		.L0_\@, R_ARM_ALU_PC_G0_NC, \sym
	.reloc		.L1_\@, R_ARM_ALU_PC_G1_NC, \sym
	.reloc		.L2_\@, R_ARM_LDR_PC_G2, \sym
	/*
	 * The SUB opcodes and the LDR/STR offset below are placeholders:
	 * the .reloc directives above make the linker rewrite each
	 * immediate with the matching group of the PC-relative offset to
	 * \sym. The #8 accounts for the PC reading as '. + 8' in ARM state.
	 */
.L0_\@: sub\cond	\tmp, pc, #8
.L1_\@: sub\cond	\tmp, \tmp, #4
.L2_\@: \op\cond	\reg, [\tmp, #0]
#endif
	.endm

	/*
	 * ldr_va - load a 32-bit word from the virtual address of \sym
	 *
	 * rd:	 destination register; also reused as the address scratch
	 *	 register, so no separate temporary is needed
	 * cond: optional condition code
	 */
	.macro		ldr_va, rd:req, sym:req, cond
	__ldst_va	ldr, \rd, \rd, \sym, \cond
	.endm

	/*
	 * str_va - store a 32-bit word to the virtual address of \sym
	 *
	 * rn:	 register holding the value to store
	 * tmp:	 scratch register used to form the address; should differ
	 *	 from \rn, which must still hold the value when the store
	 *	 issues
	 * cond: optional condition code
	 */
	.macro		str_va, rn:req, sym:req, tmp:req, cond
	__ldst_va	str, \rn, \tmp, \sym, \cond
	.endm

	/*
	 * rev_l - byte-swap a 32-bit value
	 *
+1 −1
Original line number Diff line number Diff line
@@ -53,7 +53,7 @@ UNWIND( .setfp fpreg, sp )
	subs	r2, sp, r0		@ SP above bottom of IRQ stack?
	rsbscs	r2, r2, #THREAD_SIZE	@ ... and below the top?
#ifdef CONFIG_VMAP_STACK
	ldr_l	r2, high_memory, cc	@ End of the linear region
	ldr_va	r2, high_memory, cc	@ End of the linear region
	cmpcc	r2, r0			@ Stack pointer was below it?
#endif
	movcs	sp, r0			@ If so, revert to incoming SP
+1 −1
Original line number Diff line number Diff line
@@ -445,7 +445,7 @@ THUMB( it ne )
	@ in such cases so just carry on.
	@
	str	ip, [r0, #12]			@ Stash IP on the mode stack
	ldr_l	ip, high_memory			@ Start of VMALLOC space
	ldr_va	ip, high_memory			@ Start of VMALLOC space
ARM(	cmp	sp, ip			)	@ SP in vmalloc space?
THUMB(	cmp	r1, ip			)
THUMB(	itt	lo			)