@@ -458,3 +458,159 @@ static __inline__ void atomic64_add(long i, atomic64_t * v)
  * Atomically subtracts @i from @v.
  */
 static __inline__ void atomic64_sub(long i, atomic64_t * v)
+{
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
+		long temp;
+
+		__asm__ __volatile__(
+		"	.set	mips3					\n"
+		"1:	lld	%0, %1		# atomic64_sub		\n"
+		"	dsubu	%0, %2					\n"
+		"	scd	%0, %1					\n"
+		"	beqzl	%0, 1b					\n"
+		"	.set	mips0					\n"
+		: "=&r" (temp), "+m" (v->counter)
+		: "Ir" (i));
+	} else if (kernel_uses_llsc) {
+		long temp;
+
+		do {
+			__asm__ __volatile__(
+			"	.set	mips3				\n"
+			"	lld	%0, %1		# atomic64_sub	\n"
+			"	dsubu	%0, %2				\n"
+			"	scd	%0, %1				\n"
+			"	.set	mips0				\n"
+			: "=&r" (temp), "+m" (v->counter)
+			: "Ir" (i));
+		} while (unlikely(!temp));
+	} else {
+		unsigned long flags;
+
+		raw_local_irq_save(flags);
+		v->counter -= i;
+		raw_local_irq_restore(flags);
+	}
+}
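(Reference, not part of the patch: each LL/SC sequence above behaves like a compare-and-swap retry loop: lld links the cache line, scd stores only if nothing intervened, and a failed scd forces a retry. A minimal user-space sketch in C11 with the same relaxed-ordering semantics; the name my_atomic64_sub is hypothetical.)

#include <stdatomic.h>
#include <stdint.h>

/* Hypothetical user-space analogue of the LL/SC retry loop above:
 * reload the current value and retry until the update commits. */
static inline void my_atomic64_sub(int64_t i, _Atomic int64_t *v)
{
	int64_t old = atomic_load_explicit(v, memory_order_relaxed);

	/* compare_exchange_weak may fail spuriously, like a failed scd;
	 * on failure it reloads *v into old, mirroring the branch back to 1:. */
	while (!atomic_compare_exchange_weak_explicit(v, &old, old - i,
						      memory_order_relaxed,
						      memory_order_relaxed))
		;
}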
+
+/*
+ * Same as above, but return the result value
+ */
+static __inline__ long atomic64_add_return(long i, atomic64_t * v)
+{
+	long result;
+
+	smp_mb__before_llsc();
+
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
+		long temp;
+
+		__asm__ __volatile__(
+		"	.set	mips3					\n"
+		"1:	lld	%1, %2		# atomic64_add_return	\n"
+		"	daddu	%0, %1, %3				\n"
+		"	scd	%0, %2					\n"
+		"	beqzl	%0, 1b					\n"
+		"	daddu	%0, %1, %3				\n"
+		"	.set	mips0					\n"
+		: "=&r" (result), "=&r" (temp), "+m" (v->counter)
+		: "Ir" (i));
+	} else if (kernel_uses_llsc) {
+		long temp;
+
+		do {
+			__asm__ __volatile__(
+			"	.set	mips3				\n"
+			"	lld	%1, %2	# atomic64_add_return	\n"
+			"	daddu	%0, %1, %3			\n"
+			"	scd	%0, %2				\n"
+			"	.set	mips0				\n"
+			: "=&r" (result), "=&r" (temp), "=m" (v->counter)
+			: "Ir" (i), "m" (v->counter)
+			: "memory");
+		} while (unlikely(!result));
+
+		result = temp + i;
+	} else {
+		unsigned long flags;
+
+		raw_local_irq_save(flags);
+		result = v->counter;
+		result += i;
+		v->counter = result;
+		raw_local_irq_restore(flags);
+	}
+
+	smp_llsc_mb();
+
+	return result;
+}
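(Reference, not part of the patch: atomic64_add_return() returns the value after the addition, and the smp_mb__before_llsc()/smp_llsc_mb() pair gives it full-barrier semantics on SMP. A hypothetical caller sketch; grab_ref is an invented name.)

/* Hypothetical usage: take a reference and observe the new count.
 * The full barriers around the LL/SC loop order this operation
 * against surrounding loads and stores. */
static inline long grab_ref(atomic64_t *refs)
{
	return atomic64_add_return(1, refs);	/* count after the increment */
}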
+
+static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
+{
+	long result;
+
+	smp_mb__before_llsc();
+
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
+		long temp;
+
+		__asm__ __volatile__(
+		"	.set	mips3					\n"
+		"1:	lld	%1, %2		# atomic64_sub_return	\n"
+		"	dsubu	%0, %1, %3				\n"
+		"	scd	%0, %2					\n"
+		"	beqzl	%0, 1b					\n"
+		"	dsubu	%0, %1, %3				\n"
+		"	.set	mips0					\n"
+		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
+		: "Ir" (i), "m" (v->counter)
+		: "memory");
+	} else if (kernel_uses_llsc) {
+		long temp;
+
+		do {
+			__asm__ __volatile__(
+			"	.set	mips3				\n"
+			"	lld	%1, %2	# atomic64_sub_return	\n"
+			"	dsubu	%0, %1, %3			\n"
+			"	scd	%0, %2				\n"
+			"	.set	mips0				\n"
+			: "=&r" (result), "=&r" (temp), "=m" (v->counter)
+			: "Ir" (i), "m" (v->counter)
+			: "memory");
+		} while (unlikely(!result));
+
+		result = temp - i;
+	} else {
+		unsigned long flags;
+
+		raw_local_irq_save(flags);
+		result = v->counter;
+		result -= i;
+		v->counter = result;
+		raw_local_irq_restore(flags);
+	}
+
+	smp_llsc_mb();
+
+	return result;
+}
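(Reference, not part of the patch: a common pattern layered on atomic64_sub_return() is decrement-and-test, where a return value of zero tells exactly one caller that it dropped the last reference. Hypothetical sketch; put_ref is an invented name.)

/* Hypothetical usage: drop a reference; returns true for the final put. */
static inline int put_ref(atomic64_t *refs)
{
	return atomic64_sub_return(1, refs) == 0;
}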
+
+/*
+ * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
+ * @i: integer value to subtract
+ * @v: pointer of type atomic64_t
+ *
+ * Atomically test @v and subtract @i if @v is greater than or equal to @i.
+ * The function returns the old value of @v minus @i.
+ */
+static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
+{
+	long result;
+
+	smp_mb__before_llsc();
+
+	if (kernel_uses_llsc && R10000_LLSC_WAR) {
+		long temp;
+