/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 * But use these as seldom as possible since they are much
 * slower than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <linux/types.h>

#include <asm/barrier.h>
#include <asm/cpu-features.h>
#include <asm/cmpxchg.h>
#include <asm/war.h>
  22. #define ATOMIC_INIT(i) { (i) }
  23. /*
  24. * atomic_read - read atomic variable
  25. * @v: pointer of type atomic_t
  26. *
  27. * Atomically reads the value of @v.
  28. */
  29. #define atomic_read(v) (*(volatile int *)&(v)->counter)
  30. /*
  31. * atomic_set - set atomic variable
  32. * @v: pointer of type atomic_t
  33. * @i: required value
  34. *
  35. * Atomically sets the value of @v to @i.
  36. */
  37. #define atomic_set(v, i) ((v)->counter = (i))
  38. /*
  39. * atomic_add - add integer to atomic variable
  40. * @i: integer value to add
  41. * @v: pointer of type atomic_t
  42. *
  43. * Atomically adds @i to @v.
  44. */
  45. static __inline__ void atomic_add(int i, atomic_t * v)
  46. {
  47. if (kernel_uses_llsc && R10000_LLSC_WAR) {
  48. int temp;
  49. __asm__ __volatile__(
  50. " .set mips3 \n"
  51. "1: ll %0, %1 # atomic_add \n"
  52. " addu %0, %2 \n"
  53. " sc %0, %1 \n"
  54. " beqzl %0, 1b \n"
  55. " .set mips0 \n"
  56. : "=&r" (temp), "+m" (v->counter)
  57. : "Ir" (i));
  58. } else if (kernel_uses_llsc) {
  59. int temp;
  60. do {
  61. __asm__ __volatile__(
  62. " .set mips3 \n"
  63. " ll %0, %1 # atomic_add \n"
  64. " addu %0, %2 \n"
  65. " sc %0, %1 \n"
  66. " .set mips0 \n"
  67. : "=&r" (temp), "+m" (v->counter)
  68. : "Ir" (i));
  69. } while (unlikely(!temp));
  70. } else {
  71. unsigned long flags;
  72. raw_local_irq_save(flags);
  73. v->counter += i;
  74. raw_local_irq_restore(flags);
  75. }
  76. }
  77. /*
  78. * atomic_sub - subtract the atomic variable
  79. * @i: integer value to subtract
  80. * @v: pointer of type atomic_t
  81. *
  82. * Atomically subtracts @i from @v.
  83. */
  84. static __inline__ void atomic_sub(int i, atomic_t * v)
  85. {
  86. if (kernel_uses_llsc && R10000_LLSC_WAR) {
  87. int temp;
  88. __asm__ __volatile__(
  89. " .set mips3 \n"
  90. "1: ll %0, %1 # atomic_sub \n"
  91. " subu %0, %2 \n"
  92. " sc %0, %1 \n"
  93. " beqzl %0, 1b \n"
  94. " .set mips0 \n"
  95. : "=&r" (temp), "+m" (v->counter)
  96. : "Ir" (i));
  97. } else if (kernel_uses_llsc) {
  98. int temp;
  99. do {
  100. __asm__ __volatile__(
  101. " .set mips3 \n"
  102. " ll %0, %1 # atomic_sub \n"
  103. " subu %0, %2 \n"
  104. " sc %0, %1 \n"
  105. " .set mips0 \n"
  106. : "=&r" (temp), "+m" (v->counter)
  107. : "Ir" (i));
  108. } while (unlikely(!temp));
  109. } else {
  110. unsigned long flags;
  111. raw_local_irq_save(flags);
  112. v->counter -= i;
  113. raw_local_irq_restore(flags);
  114. }
  115. }