@@ -20,10 +20,7 @@
*
* Atomically reads the value of @v.
*/
-static inline int atomic_read(const atomic_t *v)
-{
- return (*(volatile int *)&(v)->counter);
-}
+#define atomic_read(v) ACCESS_AT_MOST_ONCE(*(int *)&(v)->counter)
/**
* atomic_set - set atomic variable
@@ -1,6 +1,7 @@
#ifndef _ASM_X86_ATOMIC64_64_H
#define _ASM_X86_ATOMIC64_64_H
+#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/alternative.h>
#include <asm/cmpxchg.h>
@@ -16,10 +17,7 @@
* Atomically reads the value of @v.
* Doesn't imply a read memory barrier.
*/
-static inline long atomic64_read(const atomic64_t *v)
-{
- return (*(volatile long *)&(v)->counter);
-}
+#define atomic64_read(v) ACCESS_AT_MOST_ONCE(*(long *)&(v)->counter)
/**
* atomic64_set - set atomic64 variable
@@ -308,8 +308,10 @@ static inline int test_and_change_bit(int nr, volatile unsigned long *addr)
static __always_inline int constant_test_bit(unsigned int nr, const volatile unsigned long *addr)
{
- return ((1UL << (nr % BITS_PER_LONG)) &
- (addr[nr / BITS_PER_LONG])) != 0;
+ unsigned long *word = (unsigned long *)addr + (nr / BITS_PER_LONG);
+ unsigned long bit = 1UL << (nr % BITS_PER_LONG);
+
+ return (bit & ACCESS_AT_MOST_ONCE(*word)) != 0;
}
static inline int variable_test_bit(int nr, volatile const unsigned long *addr)
@@ -39,7 +39,7 @@
* Atomically reads the value of @v.
*/
#ifndef atomic_read
-#define atomic_read(v) (*(volatile int *)&(v)->counter)
+#define atomic_read(v) ACCESS_AT_MOST_ONCE(*(int *)&(v)->counter)
#endif
/**
@@ -308,4 +308,14 @@ void ftrace_likely_update(struct ftrace_branch_data *f, int val, int expect);
*/
#define ACCESS_ONCE(x) (*(volatile typeof(x) *)&(x))
+/*
+ * Like ACCESS_ONCE, but can be optimized away if nothing uses the value,
+ * and/or merged with previous non-ONCE accesses.
+ */
+#define ACCESS_AT_MOST_ONCE(x) \
+ ({ typeof(x) __y; \
+ asm("":"=r" (__y):"0" (x)); \
+ __y; \
+ })
+
#endif /* __LINUX_COMPILER_H */