arch/alpha/Kconfig  +1 −0

@@ -15,6 +15,7 @@ config ALPHA
 	select ARCH_WANT_OPTIONAL_GPIOLIB
 	select ARCH_WANT_IPC_PARSE_VERSION
 	select ARCH_HAVE_NMI_SAFE_CMPXCHG
+	select ARCH_HAS_ATOMIC64_DEC_IF_POSITIVE
 	select GENERIC_SMP_IDLE_THREAD
 	select GENERIC_CMOS_UPDATE
 	select GENERIC_STRNCPY_FROM_USER
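Selecting ARCH_HAS_ATOMIC64_DEC_IF_POSITIVE advertises that the architecture supplies its own atomic64_dec_if_positive(), so the generic atomic header no longer emits the portable fallback for it. That fallback is roughly the compare-and-swap retry loop below (a sketch under the atomic64 API of this era, not the verbatim generic code; the function name is hypothetical):

/*
 * Roughly what the generic fallback does when an arch does not
 * select ARCH_HAS_ATOMIC64_DEC_IF_POSITIVE: retry cmpxchg until
 * the decrement lands or the value is seen to be non-positive.
 */
static inline long long generic_atomic64_dec_if_positive(atomic64_t *v)
{
	long long c, old, dec;

	c = atomic64_read(v);
	for (;;) {
		dec = c - 1;
		if (dec < 0)		/* not positive: leave *v alone */
			break;
		old = atomic64_cmpxchg(v, c, dec);
		if (old == c)		/* our update won the race */
			break;
		c = old;		/* lost the race: retry with fresh value */
	}
	return dec;			/* old value minus 1 either way */
}

The Alpha version below avoids the cmpxchg round trips by folding the compare, the decrement, and the conditional store into a single ldq_l/stq_c sequence.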
arch/alpha/include/asm/atomic.h  +28 −0

@@ -238,6 +238,34 @@ static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
 	return !c;
 }
 
+/*
+ * atomic64_dec_if_positive - decrement by 1 if old value positive
+ * @v: pointer of type atomic64_t
+ *
+ * The function returns the old value of *v minus 1, even if
+ * the atomic variable, v, was not decremented.
+ */
+static inline long atomic64_dec_if_positive(atomic64_t *v)
+{
+	long old, tmp;
+	smp_mb();
+	__asm__ __volatile__(
+	"1:	ldq_l	%[old],%[mem]\n"
+	"	subq	%[old],1,%[tmp]\n"
+	"	ble	%[old],2f\n"
+	"	stq_c	%[tmp],%[mem]\n"
+	"	beq	%[tmp],3f\n"
+	"2:\n"
+	".subsection 2\n"
+	"3:	br	1b\n"
+	".previous"
+	: [old] "=&r"(old), [tmp] "=&r"(tmp)
+	: [mem] "m"(*v)
+	: "memory");
+	smp_mb();
+	return old - 1;
+}
+
 #define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
 
 #define atomic_add_negative(a, v) (atomic_add_return((a), (v)) < 0)
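The loop is a standard Alpha load-locked/store-conditional sequence: ldq_l reads the counter and subq computes old - 1 speculatively; ble bails out to label 2 without storing when the old value is not positive, and stq_c publishes the decrement only if nothing else touched the location since the locked load. stq_c writes its success flag back into %[tmp], so a zero there means the reservation was lost and beq branches to the retry stub at label 3, which .subsection 2 moves out of line to keep the uncontended path branch-free. The smp_mb() on each side gives the full-barrier semantics expected of value-returning atomics, and the function returns old - 1 in all cases, matching the documented contract.

The same semantics can be modeled in portable user-space C with the GCC/Clang __atomic builtins standing in for the ldq_l/stq_c pair (a self-contained sketch; dec_if_positive and the demo in main() are hypothetical, not part of the patch):

/* Portable model of the LL/SC loop above.
 * Build: cc -O2 dec_if_positive.c && ./a.out */
#include <stdio.h>

static long long dec_if_positive(long long *v)
{
	long long old = __atomic_load_n(v, __ATOMIC_RELAXED);

	for (;;) {
		if (old <= 0)		/* mirrors "ble %[old],2f": no store */
			break;
		/* On success stores old - 1; on failure reloads 'old'
		 * and retries, like the beq path branching back to 1b. */
		if (__atomic_compare_exchange_n(v, &old, old - 1,
						0, __ATOMIC_SEQ_CST,
						__ATOMIC_RELAXED))
			break;
	}
	return old - 1;			/* old value minus 1 either way */
}

int main(void)
{
	long long v = 2;

	printf("%lld\n", dec_if_positive(&v));	/* 1  (v is now 1) */
	printf("%lld\n", dec_if_positive(&v));	/* 0  (v is now 0) */
	printf("%lld\n", dec_if_positive(&v));	/* -1 (v stays 0)  */
	return 0;
}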