From 9999e48639b3cecb08ffb37358bcba3b48161b29 Mon Sep 17 00:00:00 2001
From: hc <hc@nodka.com>
Date: Fri, 10 May 2024 08:50:17 +0000
Subject: [PATCH] add ax88772_rst

---
 kernel/arch/mips/include/asm/bitops.h | 421 ++++++++++++++++------------------------------------
 1 files changed, 132 insertions(+), 289 deletions(-)

diff --git a/kernel/arch/mips/include/asm/bitops.h b/kernel/arch/mips/include/asm/bitops.h
index da1b871..a747699 100644
--- a/kernel/arch/mips/include/asm/bitops.h
+++ b/kernel/arch/mips/include/asm/bitops.h
@@ -13,15 +13,54 @@
 #error only <linux/bitops.h> can be included directly
 #endif
 
+#include <linux/bits.h>
 #include <linux/compiler.h>
 #include <linux/types.h>
 #include <asm/barrier.h>
 #include <asm/byteorder.h>		/* sigh ... */
 #include <asm/compiler.h>
 #include <asm/cpu-features.h>
+#include <asm/isa-rev.h>
 #include <asm/llsc.h>
 #include <asm/sgidefs.h>
 #include <asm/war.h>
+
+#define __bit_op(mem, insn, inputs...) do {			\
+	unsigned long temp;					\
+								\
+	asm volatile(						\
+	"	.set		push			\n"	\
+	"	.set		" MIPS_ISA_LEVEL "	\n"	\
+	"	" __SYNC(full, loongson3_war) "		\n"	\
+	"1:	" __LL		"%0, %1			\n"	\
+	"	" insn		"			\n"	\
+	"	" __SC		"%0, %1			\n"	\
+	"	" __SC_BEQZ	"%0, 1b			\n"	\
+	"	.set		pop			\n"	\
+	: "=&r"(temp), "+" GCC_OFF_SMALL_ASM()(mem)		\
+	: inputs						\
+	: __LLSC_CLOBBER);					\
+} while (0)
+
+#define __test_bit_op(mem, ll_dst, insn, inputs...) ({		\
+	unsigned long orig, temp;				\
+								\
+	asm volatile(						\
+	"	.set		push			\n"	\
+	"	.set		" MIPS_ISA_LEVEL "	\n"	\
+	"	" __SYNC(full, loongson3_war) "		\n"	\
+	"1:	" __LL		ll_dst ", %2		\n"	\
+	"	" insn		"			\n"	\
+	"	" __SC		"%1, %2			\n"	\
+	"	" __SC_BEQZ	"%1, 1b			\n"	\
+	"	.set		pop			\n"	\
+	: "=&r"(orig), "=&r"(temp),				\
+	  "+" GCC_OFF_SMALL_ASM()(mem)				\
+	: inputs						\
+	: __LLSC_CLOBBER);					\
+								\
+	orig;							\
+})
 
 /*
  * These are the "slower" versions of the functions and are in bitops.c.
@@ -30,8 +69,6 @@
 void __mips_set_bit(unsigned long nr, volatile unsigned long *addr);
 void __mips_clear_bit(unsigned long nr, volatile unsigned long *addr);
 void __mips_change_bit(unsigned long nr, volatile unsigned long *addr);
-int __mips_test_and_set_bit(unsigned long nr,
-		volatile unsigned long *addr);
 int __mips_test_and_set_bit_lock(unsigned long nr,
 		volatile unsigned long *addr);
 int __mips_test_and_clear_bit(unsigned long nr,
@@ -52,44 +89,20 @@
  */
 static inline void set_bit(unsigned long nr, volatile unsigned long *addr)
 {
-	unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-	int bit = nr & SZLONG_MASK;
-	unsigned long temp;
+	volatile unsigned long *m = &addr[BIT_WORD(nr)];
+	int bit = nr % BITS_PER_LONG;
 
-	if (kernel_uses_llsc && R10000_LLSC_WAR) {
-		__asm__ __volatile__(
-		"	.set	arch=r4000			\n"
-		"1:	" __LL "%0, %1		# set_bit	\n"
-		"	or	%0, %2				\n"
-		"	" __SC	"%0, %1				\n"
-		"	beqzl	%0, 1b				\n"
-		"	.set	mips0				\n"
-		: "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*m)
-		: "ir" (1UL << bit), GCC_OFF_SMALL_ASM() (*m));
-#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
-	} else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
-		do {
-			__asm__ __volatile__(
-			"	" __LL "%0, %1		# set_bit	\n"
-			"	" __INS "%0, %3, %2, 1			\n"
-			"	" __SC "%0, %1				\n"
-			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
-			: "ir" (bit), "r" (~0));
-		} while (unlikely(!temp));
-#endif /* CONFIG_CPU_MIPSR2 || CONFIG_CPU_MIPSR6 */
-	} else if (kernel_uses_llsc) {
-		do {
-			__asm__ __volatile__(
-			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
-			"	" __LL "%0, %1		# set_bit	\n"
-			"	or	%0, %2				\n"
-			"	" __SC	"%0, %1				\n"
-			"	.set	mips0				\n"
-			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
-			: "ir" (1UL << bit));
-		} while (unlikely(!temp));
-	} else
+	if (!kernel_uses_llsc) {
 		__mips_set_bit(nr, addr);
+		return;
+	}
+
+	if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(bit) && (bit >= 16)) {
+		__bit_op(*m, __INS "%0, %3, %2, 1", "i"(bit), "r"(~0));
+		return;
+	}
+
+	__bit_op(*m, "or\t%0, %2", "ir"(BIT(bit)));
 }
 
 /*
@@ -104,44 +117,20 @@
  */
 static inline void clear_bit(unsigned long nr, volatile unsigned long *addr)
 {
-	unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-	int bit = nr & SZLONG_MASK;
-	unsigned long temp;
+	volatile unsigned long *m = &addr[BIT_WORD(nr)];
+	int bit = nr % BITS_PER_LONG;
 
-	if (kernel_uses_llsc && R10000_LLSC_WAR) {
-		__asm__ __volatile__(
-		"	.set	arch=r4000			\n"
-		"1:	" __LL "%0, %1		# clear_bit	\n"
-		"	and	%0, %2				\n"
-		"	" __SC "%0, %1				\n"
-		"	beqzl	%0, 1b				\n"
-		"	.set	mips0				\n"
-		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
-		: "ir" (~(1UL << bit)));
-#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
-	} else if (kernel_uses_llsc && __builtin_constant_p(bit)) {
-		do {
-			__asm__ __volatile__(
-			"	" __LL "%0, %1		# clear_bit	\n"
-			"	" __INS "%0, $0, %2, 1			\n"
-			"	" __SC "%0, %1				\n"
-			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
-			: "ir" (bit));
-		} while (unlikely(!temp));
-#endif /* CONFIG_CPU_MIPSR2 || CONFIG_CPU_MIPSR6 */
-	} else if (kernel_uses_llsc) {
-		do {
-			__asm__ __volatile__(
-			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
-			"	" __LL "%0, %1		# clear_bit	\n"
-			"	and	%0, %2				\n"
-			"	" __SC "%0, %1				\n"
-			"	.set	mips0				\n"
-			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
-			: "ir" (~(1UL << bit)));
-		} while (unlikely(!temp));
-	} else
+	if (!kernel_uses_llsc) {
 		__mips_clear_bit(nr, addr);
+		return;
+	}
+
+	if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(bit)) {
+		__bit_op(*m, __INS "%0, $0, %2, 1", "i"(bit));
+		return;
+	}
+
+	__bit_op(*m, "and\t%0, %2", "ir"(~BIT(bit)));
 }
 
 /*
@@ -169,93 +158,15 @@
  */
 static inline void change_bit(unsigned long nr, volatile unsigned long *addr)
 {
-	int bit = nr & SZLONG_MASK;
+	volatile unsigned long *m = &addr[BIT_WORD(nr)];
+	int bit = nr % BITS_PER_LONG;
 
-	if (kernel_uses_llsc && R10000_LLSC_WAR) {
-		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp;
-
-		__asm__ __volatile__(
-		"	.set	arch=r4000			\n"
-		"1:	" __LL "%0, %1		# change_bit	\n"
-		"	xor	%0, %2				\n"
-		"	" __SC	"%0, %1				\n"
-		"	beqzl	%0, 1b				\n"
-		"	.set	mips0				\n"
-		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
-		: "ir" (1UL << bit));
-	} else if (kernel_uses_llsc) {
-		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp;
-
-		do {
-			__asm__ __volatile__(
-			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
-			"	" __LL "%0, %1		# change_bit	\n"
-			"	xor	%0, %2				\n"
-			"	" __SC	"%0, %1				\n"
-			"	.set	mips0				\n"
-			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m)
-			: "ir" (1UL << bit));
-		} while (unlikely(!temp));
-	} else
+	if (!kernel_uses_llsc) {
 		__mips_change_bit(nr, addr);
-}
+		return;
+	}
 
-/*
- * test_and_set_bit - Set a bit and return its old value
- * @nr: Bit to set
- * @addr: Address to count from
- *
- * This operation is atomic and cannot be reordered.
- * It also implies a memory barrier.
- */
-static inline int test_and_set_bit(unsigned long nr,
-	volatile unsigned long *addr)
-{
-	int bit = nr & SZLONG_MASK;
-	unsigned long res;
-
-	smp_mb__before_llsc();
-
-	if (kernel_uses_llsc && R10000_LLSC_WAR) {
-		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp;
-
-		__asm__ __volatile__(
-		"	.set	arch=r4000				\n"
-		"1:	" __LL "%0, %1		# test_and_set_bit	\n"
-		"	or	%2, %0, %3				\n"
-		"	" __SC	"%2, %1					\n"
-		"	beqzl	%2, 1b					\n"
-		"	and	%2, %0, %3				\n"
-		"	.set	mips0					\n"
-		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
-		: "r" (1UL << bit)
-		: "memory");
-	} else if (kernel_uses_llsc) {
-		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp;
-
-		do {
-			__asm__ __volatile__(
-			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
-			"	" __LL "%0, %1	# test_and_set_bit	\n"
-			"	or	%2, %0, %3			\n"
-			"	" __SC	"%2, %1				\n"
-			"	.set	mips0				\n"
-			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
-			: "r" (1UL << bit)
-			: "memory");
-		} while (unlikely(!res));
-
-		res = temp & (1UL << bit);
-	} else
-		res = __mips_test_and_set_bit(nr, addr);
-
-	smp_llsc_mb();
-
-	return res != 0;
+	__bit_op(*m, "xor\t%0, %2", "ir"(BIT(bit)));
 }
 
 /*
@@ -269,48 +180,39 @@
 static inline int test_and_set_bit_lock(unsigned long nr,
 	volatile unsigned long *addr)
 {
-	int bit = nr & SZLONG_MASK;
-	unsigned long res;
+	volatile unsigned long *m = &addr[BIT_WORD(nr)];
+	int bit = nr % BITS_PER_LONG;
+	unsigned long res, orig;
 
-	if (kernel_uses_llsc && R10000_LLSC_WAR) {
-		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp;
-
-		__asm__ __volatile__(
-		"	.set	arch=r4000				\n"
-		"1:	" __LL "%0, %1		# test_and_set_bit	\n"
-		"	or	%2, %0, %3				\n"
-		"	" __SC	"%2, %1					\n"
-		"	beqzl	%2, 1b					\n"
-		"	and	%2, %0, %3				\n"
-		"	.set	mips0					\n"
-		: "=&r" (temp), "+m" (*m), "=&r" (res)
-		: "r" (1UL << bit)
-		: "memory");
-	} else if (kernel_uses_llsc) {
-		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp;
-
-		do {
-			__asm__ __volatile__(
-			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
-			"	" __LL "%0, %1	# test_and_set_bit	\n"
-			"	or	%2, %0, %3			\n"
-			"	" __SC	"%2, %1				\n"
-			"	.set	mips0				\n"
-			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
-			: "r" (1UL << bit)
-			: "memory");
-		} while (unlikely(!res));
-
-		res = temp & (1UL << bit);
-	} else
+	if (!kernel_uses_llsc) {
 		res = __mips_test_and_set_bit_lock(nr, addr);
+	} else {
+		orig = __test_bit_op(*m, "%0",
+				     "or\t%1, %0, %3",
+				     "ir"(BIT(bit)));
+		res = (orig & BIT(bit)) != 0;
+	}
 
 	smp_llsc_mb();
 
-	return res != 0;
+	return res;
 }
+
+/*
+ * test_and_set_bit - Set a bit and return its old value
+ * @nr: Bit to set
+ * @addr: Address to count from
+ *
+ * This operation is atomic and cannot be reordered.
+ * It also implies a memory barrier.
+ */
+static inline int test_and_set_bit(unsigned long nr,
+	volatile unsigned long *addr)
+{
+	smp_mb__before_atomic();
+	return test_and_set_bit_lock(nr, addr);
+}
+
 /*
  * test_and_clear_bit - Clear a bit and return its old value
  * @nr: Bit to clear
@@ -322,67 +224,30 @@
 static inline int test_and_clear_bit(unsigned long nr,
 	volatile unsigned long *addr)
 {
-	int bit = nr & SZLONG_MASK;
-	unsigned long res;
+	volatile unsigned long *m = &addr[BIT_WORD(nr)];
+	int bit = nr % BITS_PER_LONG;
+	unsigned long res, orig;
 
-	smp_mb__before_llsc();
+	smp_mb__before_atomic();
 
-	if (kernel_uses_llsc && R10000_LLSC_WAR) {
-		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp;
-
-		__asm__ __volatile__(
-		"	.set	arch=r4000				\n"
-		"1:	" __LL	"%0, %1	# test_and_clear_bit	\n"
-		"	or	%2, %0, %3				\n"
-		"	xor	%2, %3					\n"
-		"	" __SC	"%2, %1					\n"
-		"	beqzl	%2, 1b					\n"
-		"	and	%2, %0, %3				\n"
-		"	.set	mips0					\n"
-		: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
-		: "r" (1UL << bit)
-		: "memory");
-#if defined(CONFIG_CPU_MIPSR2) || defined(CONFIG_CPU_MIPSR6)
-	} else if (kernel_uses_llsc && __builtin_constant_p(nr)) {
-		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp;
-
-		do {
-			__asm__ __volatile__(
-			"	" __LL	"%0, %1	# test_and_clear_bit	\n"
-			"	" __EXT "%2, %0, %3, 1			\n"
-			"	" __INS	"%0, $0, %3, 1			\n"
-			"	" __SC	"%0, %1				\n"
-			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
-			: "ir" (bit)
-			: "memory");
-		} while (unlikely(!temp));
-#endif
-	} else if (kernel_uses_llsc) {
-		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp;
-
-		do {
-			__asm__ __volatile__(
-			"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
-			"	" __LL	"%0, %1	# test_and_clear_bit	\n"
-			"	or	%2, %0, %3			\n"
-			"	xor	%2, %3				\n"
-			"	" __SC	"%2, %1				\n"
-			"	.set	mips0				\n"
-			: "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res)
-			: "r" (1UL << bit)
-			: "memory");
-		} while (unlikely(!res));
-
-		res = temp & (1UL << bit);
-	} else
+	if (!kernel_uses_llsc) {
 		res = __mips_test_and_clear_bit(nr, addr);
+	} else if ((MIPS_ISA_REV >= 2) && __builtin_constant_p(nr)) {
+		res = __test_bit_op(*m, "%1",
+				    __EXT "%0, %1, %3, 1;"
+				    __INS "%1, $0, %3, 1",
+				    "i"(bit));
+	} else {
+		orig = __test_bit_op(*m, "%0",
+				     "or\t%1, %0, %3;"
+				     "xor\t%1, %1, %3",
+				     "ir"(BIT(bit)));
+		res = (orig & BIT(bit)) != 0;
+	}
 
 	smp_llsc_mb();
 
-	return res != 0;
+	return res;
 }
 
 /*
@@ -396,50 +261,28 @@
 static inline int test_and_change_bit(unsigned long nr,
 	volatile unsigned long *addr)
 {
-	int bit = nr & SZLONG_MASK;
-	unsigned long res;
+	volatile unsigned long *m = &addr[BIT_WORD(nr)];
+	int bit = nr % BITS_PER_LONG;
+	unsigned long res, orig;
 
-	smp_mb__before_llsc();
+	smp_mb__before_atomic();
 
-	if (kernel_uses_llsc && R10000_LLSC_WAR) {
-		unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG);
-		unsigned long temp;
-
-		__asm__ __volatile__(
-		"	.set	arch=r4000				\n"
-		"1:	" __LL	"%0, %1	# test_and_change_bit	\n"
-		"	xor	%2, %0, %3				\n"
-		"	" __SC	"%2, %1					\n"
\n" - " beqzl %2, 1b \n" - " and %2, %0, %3 \n" - " .set mips0 \n" - : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res) - : "r" (1UL << bit) - : "memory"); - } else if (kernel_uses_llsc) { - unsigned long *m = ((unsigned long *) addr) + (nr >> SZLONG_LOG); - unsigned long temp; - - do { - __asm__ __volatile__( - " .set "MIPS_ISA_ARCH_LEVEL" \n" - " " __LL "%0, %1 # test_and_change_bit \n" - " xor %2, %0, %3 \n" - " " __SC "\t%2, %1 \n" - " .set mips0 \n" - : "=&r" (temp), "+" GCC_OFF_SMALL_ASM() (*m), "=&r" (res) - : "r" (1UL << bit) - : "memory"); - } while (unlikely(!res)); - - res = temp & (1UL << bit); - } else + if (!kernel_uses_llsc) { res = __mips_test_and_change_bit(nr, addr); + } else { + orig = __test_bit_op(*m, "%0", + "xor\t%1, %0, %3", + "ir"(BIT(bit))); + res = (orig & BIT(bit)) != 0; + } smp_llsc_mb(); - return res != 0; + return res; } + +#undef __bit_op +#undef __test_bit_op #include <asm-generic/bitops/non-atomic.h> @@ -463,7 +306,7 @@ * Return the bit position (0..63) of the most significant 1 bit in a word * Returns -1 if no 1 bit exists */ -static inline unsigned long __fls(unsigned long word) +static __always_inline unsigned long __fls(unsigned long word) { int num; @@ -529,7 +372,7 @@ * Returns 0..SZLONG-1 * Undefined if no bit exists, so code should check against 0 first. */ -static inline unsigned long __ffs(unsigned long word) +static __always_inline unsigned long __ffs(unsigned long word) { return __fls(word & -word); } @@ -541,7 +384,7 @@ * This is defined the same way as ffs. * Note fls(0) = 0, fls(1) = 1, fls(0x80000000) = 32. */ -static inline int fls(int x) +static inline int fls(unsigned int x) { int r; -- Gitblit v1.6.2