Commit d40a9b0a authored by Youling Tang, committed by Hongchen Zhang
Browse files

LoongArch: Add la_abs macro implementation

LoongArch inclusion
category: feature
bugzilla: https://gitee.com/openeuler/kernel/issues/I736HO



--------------------------------

Use the "la_abs macro" instead of the "la.abs pseudo instruction" to
prepare for the subsequent PIE kernel. When PIE is not enabled, la_abs
is equivalent to la.abs.

Signed-off-by: Youling Tang <tangyouling@loongson.cn>
Change-Id: I640026372b7fe03af662aa7fa980427ab860fe5c
parent c066ef2f
Loading
Loading
Loading
Loading
+4 −0
Original line number Diff line number Diff line
@@ -898,4 +898,8 @@
	nor	\dst, \src, zero
.endm

/*
 * la_abs: load the absolute address of \sym into \reg.
 *
 * With a non-PIE kernel this is simply la.abs (per the commit message,
 * "When PIE is not enabled, la_abs is equivalent to la.abs").  The macro
 * exists so call sites can stay unchanged when a PIE kernel later
 * redefines it to a relocatable sequence.
 */
.macro la_abs reg, sym
	la.abs	\reg, \sym
.endm

#endif /* _ASM_ASMMACRO_H */
+1 −1
Original line number Diff line number Diff line
@@ -86,7 +86,7 @@
 * new value in sp.
 */
	.macro	get_saved_sp docfi=0
	la.abs	  t1, kernelsp
	la_abs	  t1, kernelsp
#ifdef CONFIG_SMP
	csrrd	  t0, PERCPU_BASE_KS
	LONG_ADD  t1, t1, t0
+4 −4
Original line number Diff line number Diff line
@@ -34,7 +34,7 @@ SYM_FUNC_END(__arch_cpu_idle)
SYM_FUNC_START(handle_vint)
	BACKUP_T0T1
	SAVE_ALL
	la.abs	t1, __arch_cpu_idle
	la_abs	t1, __arch_cpu_idle
	LONG_L	t0, sp, PT_ERA
	/* 32 byte rollback region */
	ori	t0, t0, 0x1f
@@ -43,7 +43,7 @@ SYM_FUNC_START(handle_vint)
	LONG_S	t0, sp, PT_ERA
1:	move	a0, sp
	move	a1, sp
	la.abs	t0, do_vint
	la_abs	t0, do_vint
	jirl	ra, t0, 0
	RESTORE_ALL_AND_RET
SYM_FUNC_END(handle_vint)
@@ -71,7 +71,7 @@ SYM_FUNC_END(except_vec_cex)
	SAVE_ALL
	build_prep_\prep
	move	a0, sp
	la.abs	t0, do_\handler
	la_abs	t0, do_\handler
	jirl	ra, t0, 0
	RESTORE_ALL_AND_RET
	SYM_FUNC_END(handle_\exception)
@@ -90,6 +90,6 @@ SYM_FUNC_END(except_vec_cex)
	BUILD_HANDLER reserved reserved none	/* others */

SYM_FUNC_START(handle_sys)
	la.abs	t0, handle_syscall
	la_abs	t0, handle_syscall
	jr	t0
SYM_FUNC_END(handle_sys)
+7 −7
Original line number Diff line number Diff line
@@ -32,7 +32,7 @@ SYM_FUNC_START(handle_tlb_protect)
	move	a1, zero
	csrrd	a2, LOONGARCH_CSR_BADV
	REG_S	a2, sp, PT_BVADDR
	la.abs	t0, do_page_fault
	la_abs	t0, do_page_fault
	jirl	ra, t0, 0
	RESTORE_ALL_AND_RET
SYM_FUNC_END(handle_tlb_protect)
@@ -122,7 +122,7 @@ leave_load:
	ertn
#ifdef CONFIG_64BIT
vmalloc_load:
	la.abs	t1, swapper_pg_dir
	la_abs	t1, swapper_pg_dir
	b	vmalloc_done_load
#endif

@@ -196,7 +196,7 @@ tlb_huge_update_load:
nopage_tlb_load:
	dbar	0
	csrrd	ra, EXCEPTION_KS2
	la.abs	t0, tlb_do_page_fault_0
	la_abs	t0, tlb_do_page_fault_0
	jr	t0
SYM_FUNC_END(handle_tlb_load)

@@ -288,7 +288,7 @@ leave_store:
	ertn
#ifdef CONFIG_64BIT
vmalloc_store:
	la.abs	t1, swapper_pg_dir
	la_abs	t1, swapper_pg_dir
	b	vmalloc_done_store
#endif

@@ -364,7 +364,7 @@ tlb_huge_update_store:
nopage_tlb_store:
	dbar	0
	csrrd	ra, EXCEPTION_KS2
	la.abs	t0, tlb_do_page_fault_1
	la_abs	t0, tlb_do_page_fault_1
	jr	t0
SYM_FUNC_END(handle_tlb_store)

@@ -453,7 +453,7 @@ leave_modify:
	ertn
#ifdef CONFIG_64BIT
vmalloc_modify:
	la.abs	t1, swapper_pg_dir
	la_abs	t1, swapper_pg_dir
	b	vmalloc_done_modify
#endif

@@ -523,7 +523,7 @@ tlb_huge_update_modify:
nopage_tlb_modify:
	dbar	0
	csrrd	ra, EXCEPTION_KS2
	la.abs	t0, tlb_do_page_fault_1
	la_abs	t0, tlb_do_page_fault_1
	jr	t0
SYM_FUNC_END(handle_tlb_modify)