Subject: [PATCH 5.19 07/48] provide arch_test_bit_acquire for architectures that define test_bit
From: Mikulas Patocka <mpatocka@redhat.com>

commit d6ffe6067a54972564552ea45d320fb98db1ac5e upstream.

Some architectures define their own arch_test_bit and they also need
arch_test_bit_acquire; otherwise they won't compile. We also clean up
the code by using the generic test_bit if that is equivalent to the
arch-specific version.

Signed-off-by: Mikulas Patocka <mpatocka@redhat.com>
Cc: stable@vger.kernel.org
Fixes: 8238b4579866 ("wait_on_bit: add an acquire memory barrier")
Signed-off-by: Linus Torvalds <torvalds@linux-foundation.org>
Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
---
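[ Note: apart from hexagon, which reuses its existing tstbit asm, every
  architecture below gains the same helper: an smp_load_acquire() of the
  word holding the bit, followed by the usual shift-and-mask.  A sketch of
  that shared pattern with explanatory comments added; BIT_WORD(),
  BITS_PER_LONG and smp_load_acquire() are provided by headers these files
  already pull in, which is why the hunks build as-is. ]

static __always_inline bool
test_bit_acquire(unsigned long nr, const volatile unsigned long *addr)
{
	/* Word that holds bit 'nr'; cast away volatile for smp_load_acquire(). */
	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);

	/*
	 * Acquire-load the whole word, then shift and mask out the bit.
	 * Unlike plain test_bit(), later loads on this CPU cannot be
	 * reordered before this load of the flag word.
	 */
	return 1UL & (smp_load_acquire(p) >> (nr & (BITS_PER_LONG-1)));
}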
 arch/alpha/include/asm/bitops.h   |  7 +++++++
 arch/hexagon/include/asm/bitops.h | 15 +++++++++++++++
 arch/ia64/include/asm/bitops.h    |  7 +++++++
 arch/m68k/include/asm/bitops.h    |  6 ++++++
 arch/s390/include/asm/bitops.h    |  7 +++++++
 arch/sh/include/asm/bitops-op32.h |  7 +++++++
 6 files changed, 49 insertions(+)

--- a/arch/alpha/include/asm/bitops.h
+++ b/arch/alpha/include/asm/bitops.h
@@ -289,6 +289,13 @@ test_bit(int nr, const volatile void * a
return (1UL & (((const int *) addr)[nr >> 5] >> (nr & 31))) != 0UL;
}

+static __always_inline bool
+test_bit_acquire(unsigned long nr, const volatile unsigned long *addr)
+{
+	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
+	return 1UL & (smp_load_acquire(p) >> (nr & (BITS_PER_LONG-1)));
+}
+
/*
* ffz = Find First Zero in word. Undefined if no zero exists,
* so code should check against ~0UL first..
--- a/arch/hexagon/include/asm/bitops.h
+++ b/arch/hexagon/include/asm/bitops.h
@@ -172,7 +172,22 @@ static inline int __test_bit(int nr, con
return retval;
}

+static inline int __test_bit_acquire(int nr, const volatile unsigned long *addr)
+{
+	int retval;
+
+	asm volatile(
+	"{P0 = tstbit(%1,%2); if (P0.new) %0 = #1; if (!P0.new) %0 = #0;}\n"
+	: "=&r" (retval)
+	: "r" (addr[BIT_WORD(nr)]), "r" (nr % BITS_PER_LONG)
+	: "p0", "memory"
+	);
+
+	return retval;
+}
+
#define test_bit(nr, addr) __test_bit(nr, addr)
+#define test_bit_acquire(nr, addr) __test_bit_acquire(nr, addr)

/*
* ffz - find first zero in word.
--- a/arch/ia64/include/asm/bitops.h
+++ b/arch/ia64/include/asm/bitops.h
@@ -337,6 +337,13 @@ test_bit (int nr, const volatile void *a
return 1 & (((const volatile __u32 *) addr)[nr >> 5] >> (nr & 31));
}

+static __always_inline bool
+test_bit_acquire(unsigned long nr, const volatile unsigned long *addr)
+{
+	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
+	return 1UL & (smp_load_acquire(p) >> (nr & (BITS_PER_LONG-1)));
+}
+
/**
* ffz - find the first zero bit in a long word
* @x: The long word to find the bit in
--- a/arch/m68k/include/asm/bitops.h
+++ b/arch/m68k/include/asm/bitops.h
@@ -153,6 +153,12 @@ static inline int test_bit(int nr, const
return (vaddr[nr >> 5] & (1UL << (nr & 31))) != 0;
}

+static __always_inline bool
+test_bit_acquire(unsigned long nr, const volatile unsigned long *addr)
+{
+	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
+	return 1UL & (smp_load_acquire(p) >> (nr & (BITS_PER_LONG-1)));
+}

static inline int bset_reg_test_and_set_bit(int nr,
volatile unsigned long *vaddr)
--- a/arch/s390/include/asm/bitops.h
+++ b/arch/s390/include/asm/bitops.h
@@ -184,6 +184,13 @@ static inline bool arch_test_bit(unsigne
return *addr & mask;
}

+static __always_inline bool
+arch_test_bit_acquire(unsigned long nr, const volatile unsigned long *addr)
+{
+	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
+	return 1UL & (smp_load_acquire(p) >> (nr & (BITS_PER_LONG-1)));
+}
+
static inline bool arch_test_and_set_bit_lock(unsigned long nr,
volatile unsigned long *ptr)
{
--- a/arch/sh/include/asm/bitops-op32.h
+++ b/arch/sh/include/asm/bitops-op32.h
@@ -138,4 +138,11 @@ static inline int test_bit(int nr, const
return 1UL & (addr[BIT_WORD(nr)] >> (nr & (BITS_PER_LONG-1)));
}

+static __always_inline bool
+test_bit_acquire(unsigned long nr, const volatile unsigned long *addr)
+{
+	unsigned long *p = ((unsigned long *)addr) + BIT_WORD(nr);
+	return 1UL & (smp_load_acquire(p) >> (nr & (BITS_PER_LONG-1)));
+}
+
#endif /* __ASM_SH_BITOPS_OP32_H */
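
[ Why the acquire ordering matters, per the Fixes commit ("wait_on_bit: add
  an acquire memory barrier"): wait_on_bit() tests the flag with an acquire
  load (test_bit_acquire()), so a waiter's later reads cannot be satisfied
  before it actually observes the bit clear.  A rough usage sketch; the
  structure, MY_OBJ_BUSY flag and consume() helper are hypothetical. ]

	/*
	 * Producer (elsewhere): write obj->result, then clear the flag with
	 * release semantics and wake waiters, e.g. clear_bit_unlock() plus
	 * wake_up_bit().
	 */

	/* Consumer: sleep until the busy bit is clear ... */
	wait_on_bit(&obj->flags, MY_OBJ_BUSY, TASK_UNINTERRUPTIBLE);

	/*
	 * ... the acquire in test_bit_acquire() guarantees this read sees
	 * everything the producer wrote before clearing the bit.
	 */
	consume(obj->result);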
