Patch: introduce a finish_arch_post_lock_switch() scheduler callback, invoked in
finish_task_switch() after the runqueue lock is released, with a no-op default
for architectures that do not define it.

diff --git a/kernel/sched/core.c b/kernel/sched/core.c
--- a/kernel/sched/core.c
+++ b/kernel/sched/core.c
@@ -1962,6 +1962,7 @@ static void finish_task_switch(struct rq *rq, struct task_struct *prev)
 	local_irq_enable();
 #endif /* __ARCH_WANT_INTERRUPTS_ON_CTXSW */
 	finish_lock_switch(rq, prev);
+	finish_arch_post_lock_switch();
 
 	fire_sched_in_preempt_notifiers(current);
 	if (mm)

diff --git a/kernel/sched/sched.h b/kernel/sched/sched.h
--- a/kernel/sched/sched.h
+++ b/kernel/sched/sched.h
@@ -681,6 +681,9 @@ static inline int task_running(struct rq *rq, struct task_struct *p)
 #ifndef finish_arch_switch
 # define finish_arch_switch(prev)	do { } while (0)
 #endif
+#ifndef finish_arch_post_lock_switch
+# define finish_arch_post_lock_switch()	do { } while (0)
+#endif
 
 #ifndef __ARCH_WANT_UNLOCKED_CTXSW
 static inline void prepare_lock_switch(struct rq *rq, struct task_struct *next)