arch/mips/Kconfig  +11 −0

@@ -1190,8 +1190,19 @@ config SYS_HAS_CPU_RM9000
 config SYS_HAS_CPU_SB1
 	bool
 
+#
+# CPU may reorder R->R, R->W, W->R, W->W
+# Reordering beyond LL and SC is handled in WEAK_REORDERING_BEYOND_LLSC
+#
 config WEAK_ORDERING
 	bool
+
+#
+# CPU may reorder reads and writes beyond LL/SC
+# CPU may reorder R->LL, R->LL, W->LL, W->LL, R->SC, R->SC, W->SC, W->SC
+#
+config WEAK_REORDERING_BEYOND_LLSC
+	bool
 endmenu
 
 #
include/asm-mips/atomic.h  +17 −16

@@ -138,7 +138,7 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)
 {
 	unsigned long result;
 
-	smp_mb();
+	smp_llsc_mb();
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long temp;
@@ -181,7 +181,7 @@ static __inline__ int atomic_add_return(int i, atomic_t * v)
 		raw_local_irq_restore(flags);
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 
 	return result;
 }
@@ -190,7 +190,7 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)
 {
 	unsigned long result;
 
-	smp_mb();
+	smp_llsc_mb();
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long temp;
@@ -233,7 +233,7 @@ static __inline__ int atomic_sub_return(int i, atomic_t * v)
 		raw_local_irq_restore(flags);
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 
 	return result;
 }
@@ -250,7 +250,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 {
 	unsigned long result;
 
-	smp_mb();
+	smp_llsc_mb();
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long temp;
@@ -302,7 +302,7 @@ static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
 		raw_local_irq_restore(flags);
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 
 	return result;
 }
@@ -519,7 +519,7 @@ static __inline__ long atomic64_add_return(long i, atomic64_t * v)
 {
 	unsigned long result;
 
-	smp_mb();
+	smp_llsc_mb();
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long temp;
@@ -562,7 +562,7 @@ static __inline__ long atomic64_add_return(long i, atomic64_t * v)
 		raw_local_irq_restore(flags);
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 
 	return result;
 }
@@ -571,7 +571,7 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
 {
 	unsigned long result;
 
-	smp_mb();
+	smp_llsc_mb();
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long temp;
@@ -614,7 +614,7 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
 		raw_local_irq_restore(flags);
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 
 	return result;
 }
@@ -631,7 +631,7 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
 {
 	unsigned long result;
 
-	smp_mb();
+	smp_llsc_mb();
 
 	if (cpu_has_llsc && R10000_LLSC_WAR) {
 		unsigned long temp;
@@ -683,7 +683,7 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
 		raw_local_irq_restore(flags);
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 
 	return result;
 }
@@ -791,10 +791,11 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
  * atomic*_return operations are serializing but not the non-*_return
  * versions.
  */
-#define smp_mb__before_atomic_dec()	smp_mb()
-#define smp_mb__after_atomic_dec()	smp_mb()
-#define smp_mb__before_atomic_inc()	smp_mb()
-#define smp_mb__after_atomic_inc()	smp_mb()
+#define smp_mb__before_atomic_dec()	smp_llsc_mb()
+#define smp_mb__after_atomic_dec()	smp_llsc_mb()
+#define smp_mb__before_atomic_inc()	smp_llsc_mb()
+#define smp_mb__after_atomic_inc()	smp_llsc_mb()
+
 
 #include <asm-generic/atomic.h>
 #endif /* _ASM_ATOMIC_H */
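To make the intent of these substitutions concrete, the sketch below shows roughly where the two smp_llsc_mb() calls end up relative to the LL/SC retry loop in an atomic_add_return()-style operation. This is a simplified illustration, not the kernel's exact code: the type and function names are made up for the example, the ISA directives and the R10000_LLSC_WAR / non-LL/SC fallbacks are omitted, and smp_llsc_mb() is the macro introduced in the barrier.h hunk further down.

/* Simplified illustration only -- the real kernel code also handles
 * R10000_LLSC_WAR and CPUs without LL/SC, and brackets the asm with
 * .set mips3 / .set mips0. */
typedef struct { volatile int counter; } atomic_sketch_t;

static inline int atomic_add_return_sketch(int i, atomic_sketch_t *v)
{
	int result, temp;

	smp_llsc_mb();		/* order accesses issued before the atomic op;
				 * only a compiler barrier unless
				 * CONFIG_WEAK_REORDERING_BEYOND_LLSC (with
				 * CONFIG_SMP) makes it emit a real sync */

	__asm__ __volatile__(
	"1:	ll	%1, %2		# load-linked the counter	\n"
	"	addu	%0, %1, %3	# compute the new value		\n"
	"	sc	%0, %2		# store-conditional it back	\n"
	"	beqz	%0, 1b		# SC failed: retry		\n"
	"	addu	%0, %1, %3	# recompute the return value	\n"
	: "=&r" (result), "=&r" (temp), "+m" (v->counter)
	: "Ir" (i)
	: "memory");

	smp_llsc_mb();		/* order the completed LL/SC sequence before
				 * everything that follows */

	return result;
}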
include/asm-mips/barrier.h  +9 −0

@@ -121,6 +121,11 @@
 #else
 #define __WEAK_ORDERING_MB	"		\n"
 #endif
+#if defined(CONFIG_WEAK_REORDERING_BEYOND_LLSC) && defined(CONFIG_SMP)
+#define __WEAK_LLSC_MB		"	sync	\n"
+#else
+#define __WEAK_LLSC_MB		"		\n"
+#endif
 
 #define smp_mb()	__asm__ __volatile__(__WEAK_ORDERING_MB : : :"memory")
 #define smp_rmb()	__asm__ __volatile__(__WEAK_ORDERING_MB : : :"memory")
@@ -129,4 +134,8 @@
 #define set_mb(var, value) \
 	do { var = value; smp_mb(); } while (0)
 
+#define smp_llsc_mb()	__asm__ __volatile__(__WEAK_LLSC_MB : : :"memory")
+#define smp_llsc_rmb()	__asm__ __volatile__(__WEAK_LLSC_MB : : :"memory")
+#define smp_llsc_wmb()	__asm__ __volatile__(__WEAK_LLSC_MB : : :"memory")
+
 #endif /* __ASM_BARRIER_H */
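The practical effect of the two Kconfig options on these macros is easiest to see by expanding them by hand. The stand-alone sketch below defines the CONFIG_* macros manually purely for illustration (in a real build they come from the generated config headers) and assumes the pre-existing __WEAK_ORDERING_MB guard tests CONFIG_WEAK_ORDERING, which is not shown in this hunk's context.

/* Hand-expanded sketch of the barrier selection above.  The chosen
 * configuration here is an assumption for the example: a CPU that
 * reorders ordinary loads/stores but not around LL/SC. */
#define CONFIG_SMP 1
#define CONFIG_WEAK_ORDERING 1
/* CONFIG_WEAK_REORDERING_BEYOND_LLSC deliberately left undefined */

#if defined(CONFIG_WEAK_ORDERING) && defined(CONFIG_SMP)
#define __WEAK_ORDERING_MB	"	sync	\n"	/* smp_mb() must emit sync    */
#else
#define __WEAK_ORDERING_MB	"		\n"
#endif

#if defined(CONFIG_WEAK_REORDERING_BEYOND_LLSC) && defined(CONFIG_SMP)
#define __WEAK_LLSC_MB		"	sync	\n"	/* LL/SC needs its own sync   */
#else
#define __WEAK_LLSC_MB		"		\n"	/* LL/SC already orders: no-op */
#endif

/* Under the assumed configuration, smp_mb() emits a real sync while
 * smp_llsc_mb() reduces to a compiler barrier, so the atomic and bitop
 * paths converted by this patch no longer pay for sync on such CPUs. */
#define smp_mb()	__asm__ __volatile__(__WEAK_ORDERING_MB : : : "memory")
#define smp_llsc_mb()	__asm__ __volatile__(__WEAK_LLSC_MB : : : "memory")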
include/asm-mips/bitops.h  +5 −5

@@ -38,8 +38,8 @@
 /*
  * clear_bit() doesn't provide any barrier for the compiler.
  */
-#define smp_mb__before_clear_bit()	smp_mb()
-#define smp_mb__after_clear_bit()	smp_mb()
+#define smp_mb__before_clear_bit()	smp_llsc_mb()
+#define smp_mb__after_clear_bit()	smp_llsc_mb()
 
 /*
  * set_bit - Atomically set a bit in memory
@@ -289,7 +289,7 @@ static inline int test_and_set_bit(unsigned long nr,
 		raw_local_irq_restore(flags);
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 
 	return res != 0;
 }
@@ -377,7 +377,7 @@ static inline int test_and_clear_bit(unsigned long nr,
 		raw_local_irq_restore(flags);
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 
 	return res != 0;
 }
@@ -445,7 +445,7 @@ static inline int test_and_change_bit(unsigned long nr,
 		raw_local_irq_restore(flags);
 	}
 
-	smp_mb();
+	smp_llsc_mb();
 
 	return res != 0;
 }
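Why these functions keep a barrier after the LL/SC loop at all is easiest to see from a caller's perspective. The usage sketch below is hypothetical: the lock word, the protected variable and the helper name are invented for the example; only test_and_set_bit(), clear_bit() and smp_mb__before_clear_bit() are the kernel interfaces touched by this patch.

#include <linux/bitops.h>	/* test_and_set_bit(), clear_bit(), barrier helpers */

static unsigned long lock_word;	/* hypothetical: bit 0 used as a simple bit lock */
static int shared_data;		/* hypothetical: protected by that bit           */

static void update_shared_sketch(int val)
{
	/* Acquire: the barrier test_and_set_bit() issues after its successful
	 * SC (now smp_llsc_mb()) is what keeps the store below from being
	 * reordered ahead of taking the lock on a CPU that reorders beyond
	 * LL/SC; on other CPUs the LL/SC sequence itself provides the order. */
	while (test_and_set_bit(0, &lock_word))
		;			/* spin until the bit was previously clear */

	shared_data = val;		/* inside the critical section */

	/* Release: order the store above before the bit is cleared. */
	smp_mb__before_clear_bit();
	clear_bit(0, &lock_word);
}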
include/asm-mips/futex.h  +4 −4

@@ -29,7 +29,7 @@
 	"	.set	mips3				\n"	\
 	"2:	sc	$1, %2				\n"	\
 	"	beqzl	$1, 1b				\n"	\
-	__WEAK_ORDERING_MB					\
+	__WEAK_LLSC_MB						\
 	"3:						\n"	\
 	"	.set	pop				\n"	\
 	"	.set	mips0				\n"	\
@@ -55,7 +55,7 @@
 	"	.set	mips3				\n"	\
 	"2:	sc	$1, %2				\n"	\
 	"	beqz	$1, 1b				\n"	\
-	__WEAK_ORDERING_MB					\
+	__WEAK_LLSC_MB						\
 	"3:						\n"	\
 	"	.set	pop				\n"	\
 	"	.set	mips0				\n"	\
@@ -152,7 +152,7 @@ futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval, int newval)
 	"	.set	mips3				\n"
 	"2:	sc	$1, %1				\n"
 	"	beqzl	$1, 1b				\n"
-	__WEAK_ORDERING_MB
+	__WEAK_LLSC_MB
 	"3:						\n"
 	"	.set	pop				\n"
 	"	.section .fixup,\"ax\"			\n"
@@ -179,7 +179,7 @@ futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval, int newval)
 	"	.set	mips3				\n"
 	"2:	sc	$1, %1				\n"
 	"	beqz	$1, 1b				\n"
-	__WEAK_ORDERING_MB
+	__WEAK_LLSC_MB
 	"3:						\n"
 	"	.set	pop				\n"
 	"	.section .fixup,\"ax\"			\n"