arch/powerpc/include/asm/processor.h  +0 −2

@@ -380,8 +380,6 @@ extern int set_endian(struct task_struct *tsk, unsigned int val);
 extern int get_unalign_ctl(struct task_struct *tsk, unsigned long adr);
 extern int set_unalign_ctl(struct task_struct *tsk, unsigned int val);
-extern void fp_enable(void);
-extern void vec_enable(void);
 extern void load_fp_state(struct thread_fp_state *fp);
 extern void store_fp_state(struct thread_fp_state *fp);
 extern void load_vr_state(struct thread_vr_state *vr);
arch/powerpc/kernel/fpu.S  +0 −16

@@ -76,22 +76,6 @@ END_FTR_SECTION_IFSET(CPU_FTR_VSX)
 	blr
 #endif /* CONFIG_PPC_TRANSACTIONAL_MEM */

-/*
- * Enable use of the FPU, and VSX if possible, for the caller.
- */
-_GLOBAL(fp_enable)
-	mfmsr	r3
-	ori	r3,r3,MSR_FP
-#ifdef CONFIG_VSX
-BEGIN_FTR_SECTION
-	oris	r3,r3,MSR_VSX@h
-END_FTR_SECTION_IFSET(CPU_FTR_VSX)
-#endif
-	SYNC
-	MTMSRD(r3)
-	isync			/* (not necessary for arch 2.02 and later) */
-	blr
-
 /*
  * Load state from memory into FP registers including FPSCR.
  * Assumes the caller has enabled FP in the MSR.
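For reference, the removed fp_enable() assembly above amounts to the C sketch below. This is illustrative only: the function name is hypothetical, the headers and accessors (mfmsr(), mtmsr(), isync(), cpu_has_feature()) are assumed to be the usual powerpc ones, and the original ran as assembly with a SYNC/MTMSRD/isync sequence.

#include <asm/reg.h>		/* mfmsr(), mtmsr(), MSR_FP, MSR_VSX (assumed) */
#include <asm/cputable.h>	/* cpu_has_feature(), CPU_FTR_VSX (assumed) */
#include <asm/synch.h>		/* isync() (assumed) */

/* Hypothetical C rendering of the removed fp_enable() assembly. */
static void fp_enable_sketch(void)
{
	unsigned long msr = mfmsr();		/* read the current MSR */

	msr |= MSR_FP;				/* allow FP instructions */
#ifdef CONFIG_VSX
	if (cpu_has_feature(CPU_FTR_VSX))
		msr |= MSR_VSX;			/* and VSX where the CPU has it */
#endif
	mtmsr(msr);				/* write back the new MSR */
	isync();				/* context-synchronise before using FP */
}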
arch/powerpc/kernel/process.c  +4 −2

@@ -732,13 +732,15 @@ void restore_tm_state(struct pt_regs *regs)
 	msr_diff = current->thread.ckpt_regs.msr & ~regs->msr;
 	msr_diff &= MSR_FP | MSR_VEC | MSR_VSX;
 	if (msr_diff & MSR_FP) {
-		fp_enable();
+		msr_check_and_set(MSR_FP);
 		load_fp_state(&current->thread.fp_state);
+		msr_check_and_clear(MSR_FP);
 		regs->msr |= current->thread.fpexc_mode;
 	}
 	if (msr_diff & MSR_VEC) {
-		vec_enable();
+		msr_check_and_set(MSR_VEC);
 		load_vr_state(&current->thread.vr_state);
+		msr_check_and_clear(MSR_VEC);
 	}
 	regs->msr |= msr_diff;
 }
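The new pattern in restore_tm_state() is: turn the facility bit on in the MSR only for as long as it takes to reload the saved registers, then turn it back off. The diff only shows the call sites of msr_check_and_set()/msr_check_and_clear(); the bodies below are a rough sketch of the idea under stated assumptions (the _sketch names are mine, and the VSX upgrade for MSR_FP is an assumption), not the kernel's actual implementation.

/* Sketch: set the requested MSR bits, only writing the MSR if something changes. */
static void msr_check_and_set_sketch(unsigned long bits)
{
	unsigned long oldmsr = mfmsr();
	unsigned long newmsr = oldmsr | bits;

#ifdef CONFIG_VSX
	/* Assumption: FP access implies VSX access on VSX-capable CPUs. */
	if (cpu_has_feature(CPU_FTR_VSX) && (bits & MSR_FP))
		newmsr |= MSR_VSX;
#endif
	if (newmsr != oldmsr) {
		mtmsr(newmsr);
		isync();
	}
}

/* Sketch: the inverse, clearing the bits again once the state has been loaded. */
static void msr_check_and_clear_sketch(unsigned long bits)
{
	unsigned long oldmsr = mfmsr();
	unsigned long newmsr = oldmsr & ~bits;

#ifdef CONFIG_VSX
	if (cpu_has_feature(CPU_FTR_VSX) && (bits & MSR_FP))
		newmsr &= ~MSR_VSX;
#endif
	if (newmsr != oldmsr) {
		mtmsr(newmsr);
		isync();
	}
}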
arch/powerpc/kernel/vector.S  +0 −10

@@ -32,16 +32,6 @@ _GLOBAL(do_load_up_transact_altivec)
 	blr
 #endif

-/*
- * Enable use of VMX/Altivec for the caller.
- */
-_GLOBAL(vec_enable)
-	mfmsr	r3
-	oris	r3,r3,MSR_VEC@h
-	MTMSRD(r3)
-	isync
-	blr
-
 /*
  * Load state from memory into VMX registers including VSCR.
  * Assumes the caller has enabled VMX in the MSR.