arch/m68k/include/asm/bitops.h

#ifndef _M68K_BITOPS_H
#define _M68K_BITOPS_H
/*
 * Copyright 1992, Linus Torvalds.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License. See the file COPYING in the main directory of this archive
 * for more details.
 */

#ifndef _LINUX_BITOPS_H
#error only <linux/bitops.h> can be included directly
#endif

#include <linux/compiler.h>

/*
 * Bit access functions vary across the ColdFire and 68k families.
 * So we will break them out here, and then macro in the ones we want.
 *
 * ColdFire - supports standard bset/bclr/bchg with register operand only
 * 68000    - supports standard bset/bclr/bchg with memory operand
 * >= 68020 - also supports the bfset/bfclr/bfchg instructions
 *
 * Although it is possible to use only the bset/bclr/bchg with register
 * operands on all platforms, you end up with larger generated code.
 * So we use the best form possible on a given platform.
 */
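/*
 * Worked example of the addressing used below (a sketch added for
 * illustration, not from the original source): m68k is big-endian, and
 * these helpers number bits so that bit 0 is the least significant bit
 * of each 32-bit long. The byte holding bit nr therefore sits at offset
 * (nr ^ 31) / 8, and the bit within that byte is nr & 7. For example:
 *
 *        nr = 0:  (0 ^ 31) / 8 = 3  -> byte 3 (LSB of the long), bit 0
 *        nr = 5:  (5 ^ 31) / 8 = 3  -> byte 3, bit 5
 *        nr = 31: (31 ^ 31) / 8 = 0 -> byte 0 (MSB of the long), bit 7
 */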
/* ColdFire form: bset only accepts a register bit-number operand. */
static inline void bset_reg_set_bit(int nr, volatile unsigned long *vaddr)
{
        char *p = (char *)vaddr + (nr ^ 31) / 8;

        __asm__ __volatile__ ("bset %1,(%0)"
                :
                : "a" (p), "di" (nr & 7)
                : "memory");
}

/* 68000 form: bset with a memory operand. */
static inline void bset_mem_set_bit(int nr, volatile unsigned long *vaddr)
{
        char *p = (char *)vaddr + (nr ^ 31) / 8;

        __asm__ __volatile__ ("bset %1,%0"
                : "+m" (*p)
                : "di" (nr & 7));
}

/* >= 68020 form: a one-bit-wide bfset on the long word itself. */
static inline void bfset_mem_set_bit(int nr, volatile unsigned long *vaddr)
{
        __asm__ __volatile__ ("bfset %1{%0:#1}"
                :
                : "d" (nr ^ 31), "o" (*vaddr)
                : "memory");
}
#if defined(CONFIG_COLDFIRE)
#define set_bit(nr, vaddr)      bset_reg_set_bit(nr, vaddr)
#elif defined(CONFIG_CPU_HAS_NO_BITFIELDS)
#define set_bit(nr, vaddr)      bset_mem_set_bit(nr, vaddr)
#else
#define set_bit(nr, vaddr)      (__builtin_constant_p(nr) ? \
                                bset_mem_set_bit(nr, vaddr) : \
                                bfset_mem_set_bit(nr, vaddr))
#endif

#define __set_bit(nr, vaddr)    set_bit(nr, vaddr)
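/*
 * Usage sketch (an illustration, not from the original source; the
 * bitmap name is made up). Callers pass a bit number and a pointer to
 * the base of an array of longs, as with the generic kernel bitops:
 *
 *        static unsigned long pending_irqs[2];
 *
 *        set_bit(37, pending_irqs);   bit 37 % 32 = 5 of pending_irqs[1]
 */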
/*
 * clear_bit() doesn't provide any barrier for the compiler.
 */
#define smp_mb__before_clear_bit()      barrier()
#define smp_mb__after_clear_bit()       barrier()
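/*
 * Ordering sketch (an illustration, not from the original source): a
 * caller that clears a flag and then examines other state must insert
 * the barrier itself, e.g.
 *
 *        clear_bit(WORK_PENDING, &flags);
 *        smp_mb__after_clear_bit();
 *        if (more_work())
 *                reschedule();
 *
 * WORK_PENDING, more_work() and reschedule() are hypothetical names.
 */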
static inline void bclr_reg_clear_bit(int nr, volatile unsigned long *vaddr)
{
        char *p = (char *)vaddr + (nr ^ 31) / 8;

        __asm__ __volatile__ ("bclr %1,(%0)"
                :
                : "a" (p), "di" (nr & 7)
                : "memory");
}

static inline void bclr_mem_clear_bit(int nr, volatile unsigned long *vaddr)
{
        char *p = (char *)vaddr + (nr ^ 31) / 8;

        __asm__ __volatile__ ("bclr %1,%0"
                : "+m" (*p)
                : "di" (nr & 7));
}

static inline void bfclr_mem_clear_bit(int nr, volatile unsigned long *vaddr)
{
        __asm__ __volatile__ ("bfclr %1{%0:#1}"
                :
                : "d" (nr ^ 31), "o" (*vaddr)
                : "memory");
}

#if defined(CONFIG_COLDFIRE)
#define clear_bit(nr, vaddr)    bclr_reg_clear_bit(nr, vaddr)
#elif defined(CONFIG_CPU_HAS_NO_BITFIELDS)
#define clear_bit(nr, vaddr)    bclr_mem_clear_bit(nr, vaddr)
#else
#define clear_bit(nr, vaddr)    (__builtin_constant_p(nr) ? \
                                bclr_mem_clear_bit(nr, vaddr) : \
                                bfclr_mem_clear_bit(nr, vaddr))
#endif

#define __clear_bit(nr, vaddr)  clear_bit(nr, vaddr)
static inline void bchg_reg_change_bit(int nr, volatile unsigned long *vaddr)
{
        char *p = (char *)vaddr + (nr ^ 31) / 8;

        __asm__ __volatile__ ("bchg %1,(%0)"
                :
                : "a" (p), "di" (nr & 7)
                : "memory");
}

static inline void bchg_mem_change_bit(int nr, volatile unsigned long *vaddr)
{
        char *p = (char *)vaddr + (nr ^ 31) / 8;

        __asm__ __volatile__ ("bchg %1,%0"
                : "+m" (*p)
                : "di" (nr & 7));
}

static inline void bfchg_mem_change_bit(int nr, volatile unsigned long *vaddr)
{
        __asm__ __volatile__ ("bfchg %1{%0:#1}"
                :
                : "d" (nr ^ 31), "o" (*vaddr)
                : "memory");
}

#if defined(CONFIG_COLDFIRE)
#define change_bit(nr, vaddr)   bchg_reg_change_bit(nr, vaddr)
#elif defined(CONFIG_CPU_HAS_NO_BITFIELDS)
#define change_bit(nr, vaddr)   bchg_mem_change_bit(nr, vaddr)
#else
#define change_bit(nr, vaddr)   (__builtin_constant_p(nr) ? \
                                bchg_mem_change_bit(nr, vaddr) : \
                                bfchg_mem_change_bit(nr, vaddr))
#endif

#define __change_bit(nr, vaddr) change_bit(nr, vaddr)
static inline int test_bit(int nr, const unsigned long *vaddr)
{
        return (vaddr[nr >> 5] & (1UL << (nr & 31))) != 0;
}
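/*
 * Consistency check (a sketch added for illustration, not from the
 * original source): test_bit() indexes by long (nr >> 5) and by bit
 * within the long (nr & 31), which reads the very bit the asm helpers
 * address byte-wise. E.g. for nr = 37: test_bit() checks bit 5 of
 * vaddr[1], while the asm forms compute byte (37 ^ 31) / 8 = 58 / 8 = 7,
 * i.e. byte 3 (the LSB) of vaddr[1], bit 37 & 7 = 5 - the same bit.
 */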
static inline int bset_reg_test_and_set_bit(int nr,
                volatile unsigned long *vaddr)
{
        char *p = (char *)vaddr + (nr ^ 31) / 8;
        char retval;

        __asm__ __volatile__ ("bset %2,(%1); sne %0"
                : "=d" (retval)
                : "a" (p), "di" (nr & 7)
                : "memory");
        return retval;
}

static inline int bset_mem_test_and_set_bit(int nr,
                volatile unsigned long *vaddr)
{
        char *p = (char *)vaddr + (nr ^ 31) / 8;
        char retval;

        __asm__ __volatile__ ("bset %2,%1; sne %0"
                : "=d" (retval), "+m" (*p)
                : "di" (nr & 7));
        return retval;
}

static inline int bfset_mem_test_and_set_bit(int nr,
                volatile unsigned long *vaddr)
{
        char retval;

        __asm__ __volatile__ ("bfset %2{%1:#1}; sne %0"
                : "=d" (retval)
                : "d" (nr ^ 31), "o" (*vaddr)
                : "memory");
        return retval;
}
#if defined(CONFIG_COLDFIRE)
#define test_and_set_bit(nr, vaddr)     bset_reg_test_and_set_bit(nr, vaddr)
#elif defined(CONFIG_CPU_HAS_NO_BITFIELDS)
#define test_and_set_bit(nr, vaddr)     bset_mem_test_and_set_bit(nr, vaddr)
#else
#define test_and_set_bit(nr, vaddr)     (__builtin_constant_p(nr) ? \
                                        bset_mem_test_and_set_bit(nr, vaddr) : \
                                        bfset_mem_test_and_set_bit(nr, vaddr))
#endif

#define __test_and_set_bit(nr, vaddr)   test_and_set_bit(nr, vaddr)
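/*
 * Usage sketch (an illustration, not from the original source): because
 * test_and_set_bit() returns the bit's old value, it can gate one-time
 * setup; INIT_DONE, init_state and do_one_time_setup() are hypothetical
 * names.
 *
 *        static unsigned long init_state;
 *        #define INIT_DONE 0
 *
 *        if (!test_and_set_bit(INIT_DONE, &init_state))
 *                do_one_time_setup();
 */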
static inline int bclr_reg_test_and_clear_bit(int nr,
                volatile unsigned long *vaddr)
{
        char *p = (char *)vaddr + (nr ^ 31) / 8;
        char retval;

        __asm__ __volatile__ ("bclr %2,(%1); sne %0"
                : "=d" (retval)
                : "a" (p), "di" (nr & 7)
                : "memory");
        return retval;
}

static inline int bclr_mem_test_and_clear_bit(int nr,
                volatile unsigned long *vaddr)
{
        char *p = (char *)vaddr + (nr ^ 31) / 8;
        char retval;

        __asm__ __volatile__ ("bclr %2,%1; sne %0"
                : "=d" (retval), "+m" (*p)
                : "di" (nr & 7));
        return retval;
}

static inline int bfclr_mem_test_and_clear_bit(int nr,
                volatile unsigned long *vaddr)
{
        char retval;

        __asm__ __volatile__ ("bfclr %2{%1:#1}; sne %0"
                : "=d" (retval)
                : "d" (nr ^ 31), "o" (*vaddr)
                : "memory");
        return retval;
}

#if defined(CONFIG_COLDFIRE)
#define test_and_clear_bit(nr, vaddr)   bclr_reg_test_and_clear_bit(nr, vaddr)
#elif defined(CONFIG_CPU_HAS_NO_BITFIELDS)
#define test_and_clear_bit(nr, vaddr)   bclr_mem_test_and_clear_bit(nr, vaddr)
#else
#define test_and_clear_bit(nr, vaddr)   (__builtin_constant_p(nr) ? \
                                        bclr_mem_test_and_clear_bit(nr, vaddr) : \
                                        bfclr_mem_test_and_clear_bit(nr, vaddr))
#endif

#define __test_and_clear_bit(nr, vaddr) test_and_clear_bit(nr, vaddr)
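/*
 * Usage sketch (an illustration, not from the original source; the names
 * are made up): test_and_clear_bit() atomically consumes a pending flag,
 * so an event is handled at most once.
 *
 *        if (test_and_clear_bit(EV_RX, &pending_events))
 *                handle_rx();
 */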
static inline int bchg_reg_test_and_change_bit(int nr,
                volatile unsigned long *vaddr)
{
        char *p = (char *)vaddr + (nr ^ 31) / 8;
        char retval;

        __asm__ __volatile__ ("bchg %2,(%1); sne %0"
                : "=d" (retval)
                : "a" (p), "di" (nr & 7)
                : "memory");
        return retval;
}

static inline int bchg_mem_test_and_change_bit(int nr,
                volatile unsigned long *vaddr)
{
        char *p = (char *)vaddr + (nr ^ 31) / 8;
        char retval;

        __asm__ __volatile__ ("bchg %2,%1; sne %0"
                : "=d" (retval), "+m" (*p)
                : "di" (nr & 7));
        return retval;
}

static inline int bfchg_mem_test_and_change_bit(int nr,
                volatile unsigned long *vaddr)
{
        char retval;

        __asm__ __volatile__ ("bfchg %2{%1:#1}; sne %0"
                : "=d" (retval)
                : "d" (nr ^ 31), "o" (*vaddr)
                : "memory");
        return retval;
}

#if defined(CONFIG_COLDFIRE)
#define test_and_change_bit(nr, vaddr)  bchg_reg_test_and_change_bit(nr, vaddr)
#elif defined(CONFIG_CPU_HAS_NO_BITFIELDS)
#define test_and_change_bit(nr, vaddr)  bchg_mem_test_and_change_bit(nr, vaddr)
#else
#define test_and_change_bit(nr, vaddr)  (__builtin_constant_p(nr) ? \