@@ -360,3 +360,107 @@ static inline u64 atomic64_cmpxchg(atomic64_t *ptr, u64 old, u64 new)
+	do {
+		__asm__ __volatile__("@ atomic64_cmpxchg\n"
+		"ldrexd %1, %H1, [%3]\n"
+		"mov %0, #0\n"
+		"teq %1, %4\n"
+		"teqeq %H1, %H4\n"
+		"strexdeq %0, %5, %H5, [%3]"
+		: "=&r" (res), "=&r" (oldval), "+Qo" (ptr->counter)
+		: "r" (&ptr->counter), "r" (old), "r" (new)
+		: "cc");
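+		/*
+		 * res was preloaded with zero by the mov above; strexdeq
+		 * rewrites it only when the compare matched, so the loop
+		 * repeats only if the exclusive store itself failed.
+		 */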
+	} while (res);
+
+	smp_mb();
+
+	return oldval;
+}
+
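+/*
+ * Unconditionally swap in the new 64-bit value with an ldrexd/strexd
+ * retry loop and return the previous contents of the counter.  The
+ * smp_mb() calls before and after make the exchange fully ordered.
+ */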
+static inline u64 atomic64_xchg(atomic64_t *ptr, u64 new)
+{
+	u64 result;
+	unsigned long tmp;
+
+	smp_mb();
+
+	__asm__ __volatile__("@ atomic64_xchg\n"
+"1: ldrexd %0, %H0, [%3]\n"
+" strexd %1, %4, %H4, [%3]\n"
+" teq %1, #0\n"
+" bne 1b"
+	: "=&r" (result), "=&r" (tmp), "+Qo" (ptr->counter)
+	: "r" (&ptr->counter), "r" (new)
+	: "cc");
+
+	smp_mb();
+
+	return result;
+}
+
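+/*
+ * Decrement the 64-bit counter, but only commit the new value if the
+ * result is non-negative: subs/sbc perform the 64-bit subtract across
+ * the two words, and a negative high word branches past the strexd.
+ * The (possibly negative) decremented value is returned either way.
+ */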
+static inline u64 atomic64_dec_if_positive(atomic64_t *v)
+{
+	u64 result;
+	unsigned long tmp;
+
+	smp_mb();
+
+	__asm__ __volatile__("@ atomic64_dec_if_positive\n"
+"1: ldrexd %0, %H0, [%3]\n"
+" subs %0, %0, #1\n"
+" sbc %H0, %H0, #0\n"
+" teq %H0, #0\n"
+" bmi 2f\n"
+" strexd %1, %0, %H0, [%3]\n"
+" teq %1, #0\n"
+" bne 1b\n"
+"2:"
+	: "=&r" (result), "=&r" (tmp), "+Qo" (v->counter)
+	: "r" (&v->counter)
+	: "cc");
+
+	smp_mb();
+
+	return result;
+}
+
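+/*
+ * Add 'a' to the 64-bit counter unless it currently holds 'u'.
+ * Returns non-zero if the addition was carried out, zero if the
+ * counter already equalled 'u'; the trailing smp_mb() is only
+ * issued when the counter was actually updated.
+ */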
+static inline int atomic64_add_unless(atomic64_t *v, u64 a, u64 u)
+{
+	u64 val;
+	unsigned long tmp;
+	int ret = 1;
+
+	smp_mb();
+
+	__asm__ __volatile__("@ atomic64_add_unless\n"
+"1: ldrexd %0, %H0, [%4]\n"
+" teq %0, %5\n"
+" teqeq %H0, %H5\n"
+" moveq %1, #0\n"
+" beq 2f\n"
+" adds %0, %0, %6\n"
+" adc %H0, %H0, %H6\n"
+" strexd %2, %0, %H0, [%4]\n"
+" teq %2, #0\n"
+" bne 1b\n"
+"2:"
+	: "=&r" (val), "+r" (ret), "=&r" (tmp), "+Qo" (v->counter)
+	: "r" (&v->counter), "r" (u), "r" (a)
+	: "cc");
+
+	if (ret)
+		smp_mb();
+
+	return ret;
+}
+
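+/* The remaining 64-bit helpers are simple wrappers around the primitives above. */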
+#define atomic64_add_negative(a, v) (atomic64_add_return((a), (v)) < 0)
+#define atomic64_inc(v) atomic64_add(1LL, (v))
+#define atomic64_inc_return(v) atomic64_add_return(1LL, (v))
+#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)
+#define atomic64_sub_and_test(a, v) (atomic64_sub_return((a), (v)) == 0)
+#define atomic64_dec(v) atomic64_sub(1LL, (v))
+#define atomic64_dec_return(v) atomic64_sub_return(1LL, (v))
+#define atomic64_dec_and_test(v) (atomic64_dec_return((v)) == 0)
+#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1LL, 0LL)
+
+#endif /* !CONFIG_GENERIC_ATOMIC64 */
+#endif
+#endif