@@ -403,3 +403,58 @@ static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
 /*
  * atomic64_set - set atomic variable
  * @v: pointer of type atomic64_t
+ * @i: required value
+ */
+#define atomic64_set(v, i)	((v)->counter = (i))
+
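[Review note, not part of the patch: atomic64_set() is a plain store to the counter field; an aligned 64-bit store is a single instruction on 64-bit MIPS, so the write is atomic but carries no memory-ordering guarantees. A minimal usage sketch, assuming the usual ATOMIC64_INIT() initializer from the same header; the counter name and caller are hypothetical:

	static atomic64_t nr_bytes = ATOMIC64_INIT(0);	/* hypothetical example counter */

	static void reset_stats(void)
	{
		/* plain 64-bit store; atomic, but no barriers implied */
		atomic64_set(&nr_bytes, 0);
	}
]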
+/*
+ * atomic64_add - add integer to atomic variable
+ * @i: integer value to add
+ * @v: pointer of type atomic64_t
+ *
+ * Atomically adds @i to @v.
+ */
+static __inline__ void atomic64_add(long i, atomic64_t * v)
+{
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
+		long temp;
+
+		__asm__ __volatile__(
+		"	.set	mips3					\n"
+		"1:	lld	%0, %1		# atomic64_add		\n"
+		"	daddu	%0, %2					\n"
+		"	scd	%0, %1					\n"
+		"	beqzl	%0, 1b					\n"
+		"	.set	mips0					\n"
+		: "=&r" (temp), "+m" (v->counter)
+		: "Ir" (i));
+	} else if (kernel_uses_llsc) {
+		long temp;
+
+		do {
+			__asm__ __volatile__(
+			"	.set	mips3				\n"
+			"	lld	%0, %1		# atomic64_add	\n"
+			"	daddu	%0, %2				\n"
+			"	scd	%0, %1				\n"
+			"	.set	mips0				\n"
+			: "=&r" (temp), "+m" (v->counter)
+			: "Ir" (i));
+		} while (unlikely(!temp));
+	} else {
+		unsigned long flags;
+
+		raw_local_irq_save(flags);
+		v->counter += i;
+		raw_local_irq_restore(flags);
+	}
+}
+
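[Review note, not part of the patch: all three branches implement the same relaxed 64-bit read-modify-write. The first uses the branch-likely (beqzl) retry form for the R10000_LLSC_WAR workaround, the second retries the lld/scd pair in a C do/while loop whenever the store-conditional fails, and the last falls back to disabling interrupts on CPUs without ll/sc support. A rough C11 equivalent of the intended semantics, purely illustrative; the example_* name is made up and is not a kernel API:

	#include <stdatomic.h>
	#include <stdint.h>

	static inline void example_atomic64_add(int64_t i, _Atomic int64_t *v)
	{
		/* atomic RMW with no ordering guarantees, like atomic64_add() above */
		atomic_fetch_add_explicit(v, i, memory_order_relaxed);
	}
]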
+/*
+ * atomic64_sub - subtract integer from atomic variable
+ * @i: integer value to subtract
+ * @v: pointer of type atomic64_t
+ *
+ * Atomically subtracts @i from @v.
+ */
+static __inline__ void atomic64_sub(long i, atomic64_t * v)