|
@@ -125,3 +125,150 @@ static __inline__ void atomic_sub(int i, atomic_t * v)
|
|
raw_local_irq_restore(flags);
|
|
raw_local_irq_restore(flags);
|
|
}
|
|
}
|
|
}
|
|
}
|
|
|
|
+
|
|
|
|
/*
 * atomic_add_return - add integer to atomic variable and return the result
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns the new value (@v + @i).
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
	int result;

	/* Ordering barrier before the LL/SC sequence (full-barrier semantics
	 * together with smp_llsc_mb() below). */
	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		/*
		 * R10000 workaround variant: retry with a branch-likely
		 * (beqzl) straight back to the ll.  sc overwrites %0 with
		 * its success flag, so the final addu recomputes the
		 * return value once the store has succeeded.
		 */
		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "+m" (v->counter)
		: "Ir" (i));
	} else if (kernel_uses_llsc) {
		int temp;

		/*
		 * Generic LL/SC: retry loop kept in C; result holds the
		 * sc success flag, so loop until the store succeeded,
		 * then compute the return value from the loaded old
		 * value (temp).
		 */
		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	ll	%1, %2	# atomic_add_return	\n"
			"	addu	%0, %1, %3			\n"
			"	sc	%0, %2				\n"
			"	.set	mips0				\n"
			: "=&r" (result), "=&r" (temp), "+m" (v->counter)
			: "Ir" (i));
		} while (unlikely(!result));

		result = temp + i;
	} else {
		/* No LL/SC available: fall back to disabling interrupts
		 * around a plain read-modify-write. */
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result += i;
		v->counter = result;
		raw_local_irq_restore(flags);
	}

	/* Ordering barrier after the LL/SC sequence. */
	smp_llsc_mb();

	return result;
}
|
|
|
|
+
|
|
|
|
+static __inline__ int atomic_sub_return(int i, atomic_t * v)
|
|
|
|
+{
|
|
|
|
+ int result;
|
|
|
|
+
|
|
|
|
+ smp_mb__before_llsc();
|
|
|
|
+
|
|
|
|
+ if (kernel_uses_llsc && R10000_LLSC_WAR) {
|
|
|
|
+ int temp;
|
|
|
|
+
|
|
|
|
+ __asm__ __volatile__(
|
|
|
|
+ " .set mips3 \n"
|
|
|
|
+ "1: ll %1, %2 # atomic_sub_return \n"
|
|
|
|
+ " subu %0, %1, %3 \n"
|
|
|
|
+ " sc %0, %2 \n"
|
|
|
|
+ " beqzl %0, 1b \n"
|
|
|
|
+ " subu %0, %1, %3 \n"
|
|
|
|
+ " .set mips0 \n"
|
|
|
|
+ : "=&r" (result), "=&r" (temp), "=m" (v->counter)
|
|
|
|
+ : "Ir" (i), "m" (v->counter)
|
|
|
|
+ : "memory");
|
|
|
|
+
|
|
|
|
+ result = temp - i;
|
|
|
|
+ } else if (kernel_uses_llsc) {
|
|
|
|
+ int temp;
|
|
|
|
+
|
|
|
|
+ do {
|
|
|
|
+ __asm__ __volatile__(
|
|
|
|
+ " .set mips3 \n"
|
|
|
|
+ " ll %1, %2 # atomic_sub_return \n"
|
|
|
|
+ " subu %0, %1, %3 \n"
|
|
|
|
+ " sc %0, %2 \n"
|
|
|
|
+ " .set mips0 \n"
|
|
|
|
+ : "=&r" (result), "=&r" (temp), "+m" (v->counter)
|
|
|
|
+ : "Ir" (i));
|
|
|
|
+ } while (unlikely(!result));
|
|
|
|
+
|
|
|
|
+ result = temp - i;
|
|
|
|
+ } else {
|
|
|
|
+ unsigned long flags;
|
|
|
|
+
|
|
|
|
+ raw_local_irq_save(flags);
|
|
|
|
+ result = v->counter;
|
|
|
|
+ result -= i;
|
|
|
|
+ v->counter = result;
|
|
|
|
+ raw_local_irq_restore(flags);
|
|
|
|
+ }
|
|
|
|
+
|
|
|
|
+ smp_llsc_mb();
|
|
|
|
+
|
|
|
|
+ return result;
|
|
|
|
+}
|
|
|
|
+
|
|
|
|
+/*
|
|
|
|
+ * atomic_sub_if_positive - conditionally subtract integer from atomic variable
|
|
|
|
+ * @i: integer value to subtract
|
|
|
|
+ * @v: pointer of type atomic_t
|
|
|
|
+ *
|
|
|
|
+ * Atomically test @v and subtract @i if @v is greater or equal than @i.
|
|
|
|
+ * The function returns the old value of @v minus @i.
|
|
|
|
+ */
|
|
|
|
+static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
|
|
|
|
+{
|
|
|
|
+ int result;
|
|
|
|
+
|
|
|
|
+ smp_mb__before_llsc();
|
|
|
|
+
|
|
|
|
+ if (kernel_uses_llsc && R10000_LLSC_WAR) {
|
|
|
|
+ int temp;
|
|
|
|
+
|
|
|
|
+ __asm__ __volatile__(
|
|
|
|
+ " .set mips3 \n"
|
|
|
|
+ "1: ll %1, %2 # atomic_sub_if_positive\n"
|
|
|
|
+ " subu %0, %1, %3 \n"
|
|
|
|
+ " bltz %0, 1f \n"
|
|
|
|
+ " sc %0, %2 \n"
|
|
|
|
+ " .set noreorder \n"
|
|
|
|
+ " beqzl %0, 1b \n"
|
|
|
|
+ " subu %0, %1, %3 \n"
|
|
|
|
+ " .set reorder \n"
|
|
|
|
+ "1: \n"
|
|
|
|
+ " .set mips0 \n"
|
|
|
|
+ : "=&r" (result), "=&r" (temp), "+m" (v->counter)
|
|
|
|
+ : "Ir" (i), "m" (v->counter)
|
|
|
|
+ : "memory");
|
|
|
|
+ } else if (kernel_uses_llsc) {
|
|
|
|
+ int temp;
|
|
|
|
+
|
|
|
|
+ __asm__ __volatile__(
|
|
|
|
+ " .set mips3 \n"
|
|
|
|
+ "1: ll %1, %2 # atomic_sub_if_positive\n"
|
|
|
|
+ " subu %0, %1, %3 \n"
|
|
|
|
+ " bltz %0, 1f \n"
|
|
|
|
+ " sc %0, %2 \n"
|
|
|
|
+ " .set noreorder \n"
|