@@ -614,3 +614,160 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
 	if (kernel_uses_llsc && R10000_LLSC_WAR) {
 		long temp;
 
+		__asm__ __volatile__(
+		"	.set	mips3					\n"
+		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
+		"	dsubu	%0, %1, %3				\n"
+		"	bltz	%0, 1f					\n"
+		"	scd	%0, %2					\n"
+		"	.set	noreorder				\n"
+		"	beqzl	%0, 1b					\n"
+		"	 dsubu	%0, %1, %3				\n"
+		"	.set	reorder					\n"
+		"1:							\n"
+		"	.set	mips0					\n"
+		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
+		: "Ir" (i), "m" (v->counter)
+		: "memory");
+	} else if (kernel_uses_llsc) {
+		long temp;
+
+		__asm__ __volatile__(
+		"	.set	mips3					\n"
+		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
+		"	dsubu	%0, %1, %3				\n"
+		"	bltz	%0, 1f					\n"
+		"	scd	%0, %2					\n"
+		"	.set	noreorder				\n"
+		"	beqz	%0, 1b					\n"
+		"	 dsubu	%0, %1, %3				\n"
+		"	.set	reorder					\n"
+		"1:							\n"
+		"	.set	mips0					\n"
+		: "=&r" (result), "=&r" (temp), "+m" (v->counter)
+		: "Ir" (i));
+	} else {
+		unsigned long flags;
+
+		raw_local_irq_save(flags);
+		result = v->counter;
+		result -= i;
+		if (result >= 0)
+			v->counter = result;
+		raw_local_irq_restore(flags);
+	}
+
+	smp_llsc_mb();
+
+	return result;
+}
+
+#define atomic64_cmpxchg(v, o, n) \
+	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
+#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))
+
+/**
+ * atomic64_add_unless - add unless the number is a given value
+ * @v: pointer of type atomic64_t
+ * @a: the amount to add to v...
+ * @u: ...unless v is equal to u.
+ *
+ * Atomically adds @a to @v, so long as it was not @u.
+ * Returns the old value of @v.
+ */
+static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
+{
+	long c, old;
+	c = atomic64_read(v);
+	for (;;) {
+		if (unlikely(c == (u)))
+			break;
+		old = atomic64_cmpxchg((v), c, c + (a));
+		if (likely(old == c))
+			break;
+		c = old;
+	}
+	return c != (u);
+}
+
+#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
+
+#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
+#define atomic64_inc_return(v) atomic64_add_return(1, (v))
+
+/*
+ * atomic64_sub_and_test - subtract value from variable and test result
+ * @i: integer value to subtract
+ * @v: pointer of type atomic64_t
+ *
+ * Atomically subtracts @i from @v and returns
+ * true if the result is zero, or false for all
+ * other cases.
+ */
+#define atomic64_sub_and_test(i, v) (atomic64_sub_return((i), (v)) == 0)
+
+/*
+ * atomic64_inc_and_test - increment and test
+ * @v: pointer of type atomic64_t
+ *
+ * Atomically increments @v by 1
+ * and returns true if the result is zero, or false for all
+ * other cases.
+ */
+#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)
+
+/*
+ * atomic64_dec_and_test - decrement by 1 and test
+ * @v: pointer of type atomic64_t
+ *
+ * Atomically decrements @v by 1 and
+ * returns true if the result is 0, or false for all other
+ * cases.
+ */
+#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)
+
+/*
+ * atomic64_dec_if_positive - decrement by 1 if old value positive
+ * @v: pointer of type atomic64_t
+ */
+#define atomic64_dec_if_positive(v)	atomic64_sub_if_positive(1, v)
+
+/*
+ * atomic64_inc - increment atomic variable
+ * @v: pointer of type atomic64_t
+ *
+ * Atomically increments @v by 1.
+ */
+#define atomic64_inc(v) atomic64_add(1, (v))
+
+/*
+ * atomic64_dec - decrement and test
+ * @v: pointer of type atomic64_t
+ *
+ * Atomically decrements @v by 1.
+ */
+#define atomic64_dec(v) atomic64_sub(1, (v))
+
+/*
+ * atomic64_add_negative - add and test if negative
+ * @v: pointer of type atomic64_t
+ * @i: integer value to add
+ *
+ * Atomically adds @i to @v and returns true
+ * if the result is negative, or false when
+ * result is greater than or equal to zero.
+ */
+#define atomic64_add_negative(i, v) (atomic64_add_return(i, (v)) < 0)
+
+#endif /* CONFIG_64BIT */
+
+/*
+ * atomic*_return operations are serializing but not the non-*_return
+ * versions.
+ */
+#define smp_mb__before_atomic_dec()	smp_mb__before_llsc()
+#define smp_mb__after_atomic_dec()	smp_llsc_mb()
+#define smp_mb__before_atomic_inc()	smp_mb__before_llsc()
+#define smp_mb__after_atomic_inc()	smp_llsc_mb()
+
+#endif /* _ASM_ATOMIC_H */
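---
Usage note (not part of the patch): a minimal sketch of how the helpers added above might be exercised from 64-bit kernel code. The counter name `refs` and the wrappers `get_ref()`/`put_ref()` are hypothetical, introduced only for illustration; ATOMIC64_INIT(), atomic64_inc_not_zero() and atomic64_dec_and_test() are the interfaces defined in this header.

	static atomic64_t refs = ATOMIC64_INIT(1);

	/* Take a reference only if the counter has not already dropped to zero. */
	static int get_ref(void)
	{
		return atomic64_inc_not_zero(&refs);
	}

	/*
	 * Drop a reference; atomic64_dec_and_test() returns true exactly when
	 * the decrement brought the counter to zero, i.e. this was the last user.
	 */
	static void put_ref(void)
	{
		if (atomic64_dec_and_test(&refs))
			pr_debug("last reference dropped\n");
	}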