arch/arm64/kernel/cpu-reset.S  +1 −1

@@ -32,7 +32,7 @@ ENTRY(__cpu_soft_restart)
 	/* Clear sctlr_el1 flags. */
 	mrs	x12, sctlr_el1
-	ldr	x13, =SCTLR_ELx_FLAGS
+	mov_q	x13, SCTLR_ELx_FLAGS
 	bic	x12, x12, x13
 	pre_disable_mmu_workaround
 	msr	sctlr_el1, x12
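For reference, the pattern applied throughout this series replaces literal-pool loads (ldr xN, =CONST) with the mov_q assembler macro, which builds the constant out of movz/movk immediates instead of performing a data load. That is attractive in these reset/MMU-off paths, where a PC-relative load from a literal pool is one more memory access to reason about. The upstream macro lives in arch/arm64/include/asm/assembler.h; the sketch below is reproduced from memory and should be treated as illustrative rather than authoritative:

	/*
	 * mov_q - move an immediate constant into a 64-bit register,
	 * using between 2 and 4 movz/movk instructions depending on
	 * the magnitude and sign of the operand.
	 */
	.macro	mov_q, reg, val
	.if (((\val) >> 31) == 0 || ((\val) >> 31) == 0x1ffffffff)
	movz	\reg, :abs_g1_s:\val
	.else
	.if (((\val) >> 47) == 0 || ((\val) >> 47) == 0x1ffff)
	movz	\reg, :abs_g2_s:\val
	.else
	movz	\reg, :abs_g3:\val
	movk	\reg, :abs_g2_nc:\val
	.endif
	movk	\reg, :abs_g1_nc:\val
	.endif
	movk	\reg, :abs_g0_nc:\val
	.endm

The net effect in cpu-reset.S is that mov_q x13, SCTLR_ELx_FLAGS expands to a short movz/movk sequence rather than a load, so no literal pool entry is generated for the constant.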
arch/arm64/kernel/head.S  +7 −5

@@ -457,17 +457,19 @@ SYM_FUNC_START_LOCAL(__primary_switched)
 	b	start_kernel
 SYM_FUNC_END(__primary_switched)
+
+	.pushsection ".rodata", "a"
+SYM_DATA_START(kimage_vaddr)
+	.quad		_text - TEXT_OFFSET
+SYM_DATA_END(kimage_vaddr)
+EXPORT_SYMBOL(kimage_vaddr)
+	.popsection
 /*
  * end early head section, begin head code that is also used for
  * hotplug and needs to have the same protections as the text region
  */
 	.section ".idmap.text","awx"
-SYM_DATA_START(kimage_vaddr)
-	.quad		_text - TEXT_OFFSET
-SYM_DATA_END(kimage_vaddr)
-EXPORT_SYMBOL(kimage_vaddr)
-
 /*
  * If we're fortunate enough to boot at EL2, ensure that the world is
  * sane before dropping to EL1.
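The head.S hunk is different in nature from the rest of the series: it moves the kimage_vaddr constant out of the executable .idmap.text section ("awx") and into .rodata, so the idmap section ends up containing code only. The .pushsection/.popsection pair lets the definition stay next to the code it documents while the bytes are emitted elsewhere. A minimal sketch of that pattern follows; the label and value are illustrative, not taken from the patch:

	.section ".idmap.text", "awx"	// currently assembling idmap code

	// ... code ...

	.pushsection ".rodata", "a"	// temporarily emit into read-only data
example_value:				// illustrative label only
	.quad	0x0123456789abcdef
	.popsection			// back to ".idmap.text" automatically

	// ... further idmap code, unaffected by the data above ...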
arch/arm64/kernel/hyp-stub.S  +1 −1

@@ -63,7 +63,7 @@ el1_sync:
 	beq	9f				// Nothing to reset!

 	/* Someone called kvm_call_hyp() against the hyp-stub... */
-	ldr	x0, =HVC_STUB_ERR
+	mov_q	x0, HVC_STUB_ERR
 	eret

 9:	mov	x0, xzr
arch/arm64/kernel/relocate_kernel.S  +1 −3

@@ -41,7 +41,7 @@ ENTRY(arm64_relocate_new_kernel)
 	cmp	x0, #CurrentEL_EL2
 	b.ne	1f
 	mrs	x0, sctlr_el2
-	ldr	x1, =SCTLR_ELx_FLAGS
+	mov_q	x1, SCTLR_ELx_FLAGS
 	bic	x0, x0, x1
 	pre_disable_mmu_workaround
 	msr	sctlr_el2, x0

@@ -113,8 +113,6 @@ ENTRY(arm64_relocate_new_kernel)

 ENDPROC(arm64_relocate_new_kernel)

-.ltorg
-
 .align 3	/* To keep the 64-bit values below naturally aligned. */

 .Lcopy_end:
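The .ltorg removals fall out of the same change: ldr xN, =CONST asks the assembler to stash CONST in a literal pool and load it PC-relative, and .ltorg marks a point where pending pool entries may be dumped into the output. Once the last literal load in the file has been converted to mov_q, the pool has nothing left to hold and the directive becomes dead weight. A small illustration of the old dependency (not taken from the patch):

	ldr	x1, =0x00ff00ff00ff00ff	// assembler places the constant in a literal pool...
	ret

	.ltorg				// ...and emits the pool here, in the middle of the code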
arch/arm64/kvm/hyp-init.S  +4 −6

@@ -60,7 +60,7 @@ alternative_else_nop_endif
 	msr	ttbr0_el2, x4

 	mrs	x4, tcr_el1
-	ldr	x5, =TCR_EL2_MASK
+	mov_q	x5, TCR_EL2_MASK
 	and	x4, x4, x5
 	mov	x5, #TCR_EL2_RES1
 	orr	x4, x4, x5

@@ -102,7 +102,7 @@ alternative_else_nop_endif
 	 * as well as the EE bit on BE. Drop the A flag since the compiler
 	 * is allowed to generate unaligned accesses.
 	 */
-	ldr	x4, =(SCTLR_EL2_RES1 | (SCTLR_ELx_FLAGS & ~SCTLR_ELx_A))
+	mov_q	x4, (SCTLR_EL2_RES1 | (SCTLR_ELx_FLAGS & ~SCTLR_ELx_A))
 CPU_BE(	orr	x4, x4, #SCTLR_ELx_EE)
 	msr	sctlr_el2, x4
 	isb

@@ -142,7 +142,7 @@ reset:
	 * case we coming via HVC_SOFT_RESTART.
	 */
 	mrs	x5, sctlr_el2
-	ldr	x6, =SCTLR_ELx_FLAGS
+	mov_q	x6, SCTLR_ELx_FLAGS
 	bic	x5, x5, x6		// Clear SCTL_M and etc
 	pre_disable_mmu_workaround
 	msr	sctlr_el2, x5

@@ -155,11 +155,9 @@ reset:
 	eret

 1:	/* Bad stub call */
-	ldr	x0, =HVC_STUB_ERR
+	mov_q	x0, HVC_STUB_ERR
 	eret

 SYM_CODE_END(__kvm_handle_stub_hvc)

-	.ltorg
-
 	.popsection