/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */

#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/cpu-features.h>
#include <asm/cmpxchg.h>
#include <asm/war.h>

#define ATOMIC_INIT(i)		{ (i) }
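
/*
 * Usage sketch (illustrative only, not part of the original header): a
 * counter initialized at compile time with ATOMIC_INIT().  The variable
 * name example_pkt_count is hypothetical.
 */
static atomic_t example_pkt_count = ATOMIC_INIT(0);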

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		(*(volatile int *)&(v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)	((v)->counter = (i))
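
/*
 * Usage sketch (illustrative, not part of the original header): reading
 * the old value and resetting the counter.  example_reset_and_report()
 * is a hypothetical helper.  Note that the read/set pair below is two
 * separate atomic operations, not one atomic exchange.
 */
static __inline__ int example_reset_and_report(atomic_t *v)
{
	int old = atomic_read(v);	/* atomic load of the current value */

	atomic_set(v, 0);		/* atomic store of the new value */
	return old;
}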

/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t * v)
{
	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_add		\n"
		"	addu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "+m" (v->counter)
		: "Ir" (i));
	} else if (kernel_uses_llsc) {
		int temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	ll	%0, %1		# atomic_add	\n"
			"	addu	%0, %2				\n"
			"	sc	%0, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "+m" (v->counter)
			: "Ir" (i));
		} while (unlikely(!temp));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		v->counter += i;
		raw_local_irq_restore(flags);
	}
}
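
/*
 * For readers unfamiliar with MIPS ll/sc: the load-linked/store-conditional
 * pair above retries until the store succeeds with no intervening write to
 * the location.  A rough user-space analogue of that retry idiom, sketched
 * with C11 atomics (an illustration only; this is not how the kernel
 * implements atomic_add()):
 */
#if 0	/* illustrative sketch; C11 <stdatomic.h> is not available in-kernel */
#include <stdatomic.h>

static void sketch_atomic_add(int i, _Atomic int *counter)
{
	int old = atomic_load_explicit(counter, memory_order_relaxed);

	/* Retry while another writer slips in between load and store;
	 * a failed CAS reloads the current value into old. */
	while (!atomic_compare_exchange_weak_explicit(counter, &old, old + i,
						      memory_order_relaxed,
						      memory_order_relaxed))
		;
}
#endif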

/*
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t * v)
{
	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%0, %1		# atomic_sub		\n"
		"	subu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		"	.set	mips0					\n"
		: "=&r" (temp), "+m" (v->counter)
		: "Ir" (i));
	} else if (kernel_uses_llsc) {
		int temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	ll	%0, %1		# atomic_sub	\n"
			"	subu	%0, %2				\n"
			"	sc	%0, %1				\n"
			"	.set	mips0				\n"
			: "=&r" (temp), "+m" (v->counter)
			: "Ir" (i));
		} while (unlikely(!temp));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		v->counter -= i;
		raw_local_irq_restore(flags);
	}
}
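
/*
 * Usage sketch (illustrative, not part of the original header): balancing
 * a counter with atomic_add()/atomic_sub() when no return value is needed.
 * example_track() is a hypothetical helper.
 */
static __inline__ void example_track(atomic_t *in_flight, int claim)
{
	if (claim)
		atomic_add(1, in_flight);	/* one more request in flight */
	else
		atomic_sub(1, in_flight);	/* request completed */
}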

/*
 * Same as above, but return the result value
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
	int result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "+m" (v->counter)
		: "Ir" (i));
	} else if (kernel_uses_llsc) {
		int temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	ll	%1, %2	# atomic_add_return	\n"
			"	addu	%0, %1, %3			\n"
			"	sc	%0, %2				\n"
			"	.set	mips0				\n"
			: "=&r" (result), "=&r" (temp), "+m" (v->counter)
			: "Ir" (i));
		} while (unlikely(!result));

		result = temp + i;
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result += i;
		v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
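
/*
 * Usage sketch (illustrative; example_get() is hypothetical): the return
 * value lets the caller observe the post-increment count from the same
 * atomic operation.
 */
static __inline__ int example_get(atomic_t *refcount)
{
	/* Returns the reference count after taking our reference. */
	return atomic_add_return(1, refcount);
}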

static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
	int result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");

		result = temp - i;
	} else if (kernel_uses_llsc) {
		int temp;

		do {
			__asm__ __volatile__(
			"	.set	mips3				\n"
			"	ll	%1, %2	# atomic_sub_return	\n"
			"	subu	%0, %1, %3			\n"
			"	sc	%0, %2				\n"
			"	.set	mips0				\n"
			: "=&r" (result), "=&r" (temp), "+m" (v->counter)
			: "Ir" (i));
		} while (unlikely(!result));

		result = temp - i;
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
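
/*
 * Usage sketch (illustrative; example_put() and example_release() are
 * hypothetical): dropping a reference and releasing on the final put.
 */
static __inline__ void example_put(atomic_t *refcount,
				   void (*example_release)(void))
{
	/* atomic_sub_return() gives the new count, so 0 means last user. */
	if (atomic_sub_return(1, refcount) == 0)
		example_release();
}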

/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
	int result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		"	.set	mips0					\n"
		: "=&r" (result), "=&r" (temp), "+m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		int temp;

		__asm__ __volatile__(
		"	.set	mips3					\n"
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n"