- #ifndef _M68K_BITOPS_H
- #define _M68K_BITOPS_H
- /*
- * Copyright 1992, Linus Torvalds.
- *
- * This file is subject to the terms and conditions of the GNU General Public
- * License. See the file COPYING in the main directory of this archive
- * for more details.
- */
- #ifndef _LINUX_BITOPS_H
- #error only <linux/bitops.h> can be included directly
- #endif
- #include <linux/compiler.h>
- /*
- * Bit access functions vary across the ColdFire and 68k families.
- * So we will break them out here, and then macro in the ones we want.
- *
- * ColdFire - supports standard bset/bclr/bchg with register operand only
- * 68000 - supports standard bset/bclr/bchg with memory operand
- * >= 68020 - also supports the bfset/bfclr/bfchg instructions
- *
- * Although it is possible to use only the bset/bclr/bchg forms with
- * register operands on all platforms, doing so produces larger generated
- * code.  So we use the best form available on a given platform.
- */
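- /*
- * Added note (illustrative, not part of the original header): the helpers
- * below address bit "nr" one byte at a time.  Bit nr lives in word nr / 32
- * as the value 1UL << (nr % 32); on this big-endian CPU with 32-bit longs
- * that is bit nr & 7 of the byte at offset (nr ^ 31) / 8 from the start of
- * the bitmap.  A plain-C, non-atomic sketch of the same operation:
- *
- *    static inline void plain_set_bit(int nr, unsigned long *addr)
- *    {
- *        addr[nr / 32] |= 1UL << (nr % 32);
- *    }
- */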
- static inline void bset_reg_set_bit(int nr, volatile unsigned long *vaddr)
- {
- char *p = (char *)vaddr + (nr ^ 31) / 8;
- __asm__ __volatile__ ("bset %1,(%0)"
- :
- : "a" (p), "di" (nr & 7)
- : "memory");
- }
- static inline void bset_mem_set_bit(int nr, volatile unsigned long *vaddr)
- {
- char *p = (char *)vaddr + (nr ^ 31) / 8;
- __asm__ __volatile__ ("bset %1,%0"
- : "+m" (*p)
- : "di" (nr & 7));
- }
- static inline void bfset_mem_set_bit(int nr, volatile unsigned long *vaddr)
- {
- __asm__ __volatile__ ("bfset %1{%0:#1}"
- :
- : "d" (nr ^ 31), "o" (*vaddr)
- : "memory");
- }
- #if defined(CONFIG_COLDFIRE)
- #define set_bit(nr, vaddr) bset_reg_set_bit(nr, vaddr)
- #elif defined(CONFIG_CPU_HAS_NO_BITFIELDS)
- #define set_bit(nr, vaddr) bset_mem_set_bit(nr, vaddr)
- #else
- #define set_bit(nr, vaddr) (__builtin_constant_p(nr) ? \
- bset_mem_set_bit(nr, vaddr) : \
- bfset_mem_set_bit(nr, vaddr))
- #endif
- #define __set_bit(nr, vaddr) set_bit(nr, vaddr)
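- /*
- * Added note: the "non-atomic" __set_bit() is simply an alias for set_bit()
- * here, since the single bset/bfset instruction used above is already the
- * cheapest form available on these CPUs.
- */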
- /*
- * clear_bit() doesn't provide any barrier for the compiler.
- */
- #define smp_mb__before_clear_bit() barrier()
- #define smp_mb__after_clear_bit() barrier()
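- /*
- * Added usage sketch (illustrative): because clear_bit() is not a barrier,
- * code that must publish earlier stores before dropping a flag pairs it
- * with the macro above, e.g. (MY_PENDING and flags are made-up names):
- *
- *    update the data guarded by the flag
- *    smp_mb__before_clear_bit();
- *    clear_bit(MY_PENDING, flags);
- */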
- static inline void bclr_reg_clear_bit(int nr, volatile unsigned long *vaddr)
- {
- char *p = (char *)vaddr + (nr ^ 31) / 8;
- __asm__ __volatile__ ("bclr %1,(%0)"
- :
- : "a" (p), "di" (nr & 7)
- : "memory");
- }
- static inline void bclr_mem_clear_bit(int nr, volatile unsigned long *vaddr)
- {
- char *p = (char *)vaddr + (nr ^ 31) / 8;
- __asm__ __volatile__ ("bclr %1,%0"
- : "+m" (*p)
- : "di" (nr & 7));
- }
- static inline void bfclr_mem_clear_bit(int nr, volatile unsigned long *vaddr)
- {
- __asm__ __volatile__ ("bfclr %1{%0:#1}"
- :
- : "d" (nr ^ 31), "o" (*vaddr)
- : "memory");
- }
- #if defined(CONFIG_COLDFIRE)
- #define clear_bit(nr, vaddr) bclr_reg_clear_bit(nr, vaddr)
- #elif defined(CONFIG_CPU_HAS_NO_BITFIELDS)
- #define clear_bit(nr, vaddr) bclr_mem_clear_bit(nr, vaddr)
- #else
- #define clear_bit(nr, vaddr) (__builtin_constant_p(nr) ? \
- bclr_mem_clear_bit(nr, vaddr) : \
- bfclr_mem_clear_bit(nr, vaddr))
- #endif
- #define __clear_bit(nr, vaddr) clear_bit(nr, vaddr)
- static inline void bchg_reg_change_bit(int nr, volatile unsigned long *vaddr)
- {
- char *p = (char *)vaddr + (nr ^ 31) / 8;
- __asm__ __volatile__ ("bchg %1,(%0)"
- :
- : "a" (p), "di" (nr & 7)
- : "memory");
- }
- static inline void bchg_mem_change_bit(int nr, volatile unsigned long *vaddr)
- {
- char *p = (char *)vaddr + (nr ^ 31) / 8;
- __asm__ __volatile__ ("bchg %1,%0"
- : "+m" (*p)
- : "di" (nr & 7));
- }
- static inline void bfchg_mem_change_bit(int nr, volatile unsigned long *vaddr)
- {
- __asm__ __volatile__ ("bfchg %1{%0:#1}"
- :
- : "d" (nr ^ 31), "o" (*vaddr)
- : "memory");
- }
- #if defined(CONFIG_COLDFIRE)
- #define change_bit(nr, vaddr) bchg_reg_change_bit(nr, vaddr)
- #elif defined(CONFIG_CPU_HAS_NO_BITFIELDS)
- #define change_bit(nr, vaddr) bchg_mem_change_bit(nr, vaddr)
- #else
- #define change_bit(nr, vaddr) (__builtin_constant_p(nr) ? \
- bchg_mem_change_bit(nr, vaddr) : \
- bfchg_mem_change_bit(nr, vaddr))
- #endif
- #define __change_bit(nr, vaddr) change_bit(nr, vaddr)
- static inline int test_bit(int nr, const unsigned long *vaddr)
- {
- return (vaddr[nr >> 5] & (1UL << (nr & 31))) != 0;
- }
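- /*
- * Added usage sketch (illustrative, with made-up names): a bitmap is just
- * an array of unsigned long, and nr counts bits from its start:
- *
- *    unsigned long map[2] = { 0, 0 };    64 bits, all clear
- *    set_bit(35, map);                   bit 3 of map[1] becomes 1
- *    if (test_bit(35, map))
- *        reached, since the bit is now set
- */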
- static inline int bset_reg_test_and_set_bit(int nr,
- volatile unsigned long *vaddr)
- {
- char *p = (char *)vaddr + (nr ^ 31) / 8;
- char retval;
- __asm__ __volatile__ ("bset %2,(%1); sne %0"
- : "=d" (retval)
- : "a" (p), "di" (nr & 7)
- : "memory");
- return retval;
- }
- static inline int bset_mem_test_and_set_bit(int nr,
- volatile unsigned long *vaddr)
- {
- char *p = (char *)vaddr + (nr ^ 31) / 8;
- char retval;
- __asm__ __volatile__ ("bset %2,%1; sne %0"
- : "=d" (retval), "+m" (*p)
- : "di" (nr & 7));
- return retval;
- }
- static inline int bfset_mem_test_and_set_bit(int nr,
- volatile unsigned long *vaddr)
- {
- char retval;
- __asm__ __volatile__ ("bfset %2{%1:#1}; sne %0"
- : "=d" (retval)
- : "d" (nr ^ 31), "o" (*vaddr)
- : "memory");
- return retval;
- }
- #if defined(CONFIG_COLDFIRE)
- #define test_and_set_bit(nr, vaddr) bset_reg_test_and_set_bit(nr, vaddr)
- #elif defined(CONFIG_CPU_HAS_NO_BITFIELDS)
- #define test_and_set_bit(nr, vaddr) bset_mem_test_and_set_bit(nr, vaddr)
- #else
- #define test_and_set_bit(nr, vaddr) (__builtin_constant_p(nr) ? \
- bset_mem_test_and_set_bit(nr, vaddr) : \
- bfset_mem_test_and_set_bit(nr, vaddr))
- #endif
- #define __test_and_set_bit(nr, vaddr) test_and_set_bit(nr, vaddr)
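- /*
- * Added usage sketch (illustrative): test_and_set_bit() returns the old
- * value, so it can claim a flag exactly once even when callers race
- * (MY_BUSY and flags are made-up names):
- *
- *    if (!test_and_set_bit(MY_BUSY, flags))
- *        only the caller that saw the bit clear gets here
- */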
- static inline int bclr_reg_test_and_clear_bit(int nr,
- volatile unsigned long *vaddr)
- {
- char *p = (char *)vaddr + (nr ^ 31) / 8;
- char retval;
- __asm__ __volatile__ ("bclr %2,(%1); sne %0"
- : "=d" (retval)
- : "a" (p), "di" (nr & 7)
- : "memory");
- return retval;
- }
- static inline int bclr_mem_test_and_clear_bit(int nr,
- volatile unsigned long *vaddr)
- {
- char *p = (char *)vaddr + (nr ^ 31) / 8;
- char retval;
- __asm__ __volatile__ ("bclr %2,%1; sne %0"
- : "=d" (retval), "+m" (*p)
- : "di" (nr & 7));
- return retval;
- }
- static inline int bfclr_mem_test_and_clear_bit(int nr,
- volatile unsigned long *vaddr)
- {
- char retval;
- __asm__ __volatile__ ("bfclr %2{%1:#1}; sne %0"
- : "=d" (retval)
- : "d" (nr ^ 31), "o" (*vaddr)
- : "memory");
- return retval;
- }
- #if defined(CONFIG_COLDFIRE)
- #define test_and_clear_bit(nr, vaddr) bclr_reg_test_and_clear_bit(nr, vaddr)
- #elif defined(CONFIG_CPU_HAS_NO_BITFIELDS)
- #define test_and_clear_bit(nr, vaddr) bclr_mem_test_and_clear_bit(nr, vaddr)
- #else
- #define test_and_clear_bit(nr, vaddr) (__builtin_constant_p(nr) ? \
- bclr_mem_test_and_clear_bit(nr, vaddr) : \
- bfclr_mem_test_and_clear_bit(nr, vaddr))
- #endif
- #define __test_and_clear_bit(nr, vaddr) test_and_clear_bit(nr, vaddr)
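- /*
- * Added usage sketch (illustrative): the matching consumer side reads and
- * clears an event bit in one step, so each event is handled at most once:
- *
- *    if (test_and_clear_bit(MY_PENDING, flags))
- *        handle the event
- */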
- static inline int bchg_reg_test_and_change_bit(int nr,
- volatile unsigned long *vaddr)
- {
- char *p = (char *)vaddr + (nr ^ 31) / 8;
- char retval;
- __asm__ __volatile__ ("bchg %2,(%1); sne %0"
- : "=d" (retval)
- : "a" (p), "di" (nr & 7)
- : "memory");
- return retval;
- }
- static inline int bchg_mem_test_and_change_bit(int nr,
- volatile unsigned long *vaddr)
- {
- char *p = (char *)vaddr + (nr ^ 31) / 8;
- char retval;
- __asm__ __volatile__ ("bchg %2,%1; sne %0"
- : "=d" (retval), "+m" (*p)
- : "di" (nr & 7));
- return retval;
- }
- static inline int bfchg_mem_test_and_change_bit(int nr,
- volatile unsigned long *vaddr)
- {
- char retval;
- __asm__ __volatile__ ("bfchg %2{%1:#1}; sne %0"
- : "=d" (retval)
- : "d" (nr ^ 31), "o" (*vaddr)
- : "memory");
- return retval;
- }
- #if defined(CONFIG_COLDFIRE)
- #define test_and_change_bit(nr, vaddr) bchg_reg_test_and_change_bit(nr, vaddr)
- #elif defined(CONFIG_CPU_HAS_NO_BITFIELDS)
- #define test_and_change_bit(nr, vaddr) bchg_mem_test_and_change_bit(nr, vaddr)
- #else
- #define test_and_change_bit(nr, vaddr) (__builtin_constant_p(nr) ? \
- bchg_mem_test_and_change_bit(nr, vaddr) : \
- bfchg_mem_test_and_change_bit(nr, vaddr))
- #endif
- #define __test_and_change_bit(nr, vaddr) test_and_change_bit(nr, vaddr)
- /*
- * The true 68020 and more advanced processors support the "bfffo"
- * instruction for finding bits. ColdFire and simple 68000 parts
- * (including CPU32) do not support this. They simply use the generic
- * functions.
- */
- #if defined(CONFIG_CPU_HAS_NO_BITFIELDS)
- #include <asm-generic/bitops/find.h>
- #include <asm-generic/bitops/ffz.h>
- #else
- static inline int find_first_zero_bit(const unsigned long *vaddr,
- unsigned size)
- {
- const unsigned long *p = vaddr;
- int res = 32;
- unsigned int words;
- unsigned long num;
- if (!size)
- return 0;
- words = (size + 31) >> 5;
- while (!(num = ~*p++)) {
- if (!--words)
- goto out;
- }
- __asm__ __volatile__ ("bfffo %1{#0,#0},%0"
- : "=d" (res) : "d" (num & -num));
- res ^= 31;
- out:
- res += ((long)p - (long)vaddr - 4) * 8;
- return res < size ? res : size;
- }
- #define find_first_zero_bit find_first_zero_bit
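- /*
- * Added note: ~*p turns zero bits into one bits, num & -num isolates the
- * lowest such bit, and bfffo reports that bit's offset counted from the
- * most significant end of the 32-bit word.  For res in 0..31, res ^ 31
- * equals 31 - res, which converts the result back to the usual LSB-based
- * index (the same value __builtin_ctzl(num) would give for a 32-bit long).
- * Worked example: num & -num == 0x00000008 makes bfffo return 28, and
- * 28 ^ 31 == 3.
- */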
- static inline int find_next_zero_bit(const unsigned long *vaddr, int size,
- int offset)
- {
- const unsigned long *p = vaddr + (offset >> 5);
- int bit = offset & 31UL, res;
- if (offset >= size)
- return size;
- if (bit) {
- unsigned long num = ~*p++ & (~0UL << bit);
- offset -= bit;
- /* Look for zero in first longword */
- __asm__ __volatile__ ("bfffo %1{#0,#0},%0"
- : "=d" (res) : "d" (num & -num));
- if (res < 32) {
- offset += res ^ 31;
- return offset < size ? offset : size;
- }
- offset += 32;
- if (offset >= size)
- return size;
- }
- /* No zero yet, search remaining full bytes for a zero */
- return offset + find_first_zero_bit(p, size - offset);
- }
- #define find_next_zero_bit find_next_zero_bit
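- /*
- * Added usage sketch (illustrative, with made-up names): scanning a bitmap
- * for a free slot and claiming it atomically:
- *
- *    int slot = find_first_zero_bit(map, NSLOTS);
- *    while (slot < NSLOTS) {
- *        if (!test_and_set_bit(slot, map))
- *            break;                        claimed this slot
- *        slot = find_next_zero_bit(map, NSLOTS, slot + 1);
- *    }
- */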