arch/x86/include/asm/bitops.h | 5 +++--
include/asm-generic/atomic.h | 2 +-
include/linux/compiler.h | 7 +++++++
3 files changed, 11 insertions(+), 3 deletions(-)
@@ -308,8 +308,9 @@ static inline int test_and_change_bit(int nr, volatile unsigned long *addr)
static __always_inline int constant_test_bit(unsigned int nr, const volatile unsigned long *addr)
{
- return ((1UL << (nr % BITS_PER_LONG)) &
- (addr[nr / BITS_PER_LONG])) != 0;
+ unsigned long *word = (nr / BITS_PER_LONG) + (unsigned long *)addr;
+ unsigned long bit = 1UL << (nr % BITS_PER_LONG);
+ return (bit & ACCESS_AT_MOST_ONCE(*word)) != 0;
}
static inline int variable_test_bit(int nr, volatile const unsigned long *addr)
@@ -39,7 +39,7 @@
* Atomically reads the value of @v.
*/
#ifndef atomic_read
/* Relaxed read of the counter: the compiler may drop or merge the load. */
#define atomic_read(v)	ACCESS_AT_MOST_ONCE((v)->counter)
#endif
/**
@@ -308,4 +308,11 @@ void ftrace_likely_update(struct ftrace_branch_data *f, int val, int expect);
*/
#define ACCESS_ONCE(x) (*(volatile typeof(x) *)&(x))
/*
 * Like ACCESS_ONCE, but can be optimized away if nothing uses the value,
 * and/or merged with previous non-ONCE accesses.
 *
 * The empty asm ties the result to a register ("=r" output seeded from
 * @x via the "0" matching constraint), so at most one load of @x is
 * emitted — but unlike a volatile access it may be elided entirely.
 * NOTE(review): "=r" assumes @x fits in a single register — confirm for
 * any non-scalar use.
 *
 * Use the reserved spellings __typeof__/__asm__ rather than typeof/asm:
 * the bare keywords are disabled under strict -std= ISO modes.
 */
#define ACCESS_AT_MOST_ONCE(x) \
	({ __typeof__(x) __y; __asm__("" : "=r" (__y) : "0" (x)); __y; })

#endif /* __LINUX_COMPILER_H */