@@ -34,7 +34,9 @@ static inline int arch_futex_atomic_op_inuser(int op, int oparg, int *oval,
u32 __user *uaddr)
{
int oldval = 0, ret;
+ unsigned long amr;
+ amr = unlock_user_access();
pagefault_disable();
switch (op) {
@@ -62,6 +64,7 @@ static inline int arch_futex_atomic_op_inuser(int op, int oparg, int *oval,
if (!ret)
*oval = oldval;
+ lock_user_access(amr);
return ret;
}
@@ -71,10 +74,12 @@ futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
{
int ret = 0;
u32 prev;
+ unsigned long amr;
if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
return -EFAULT;
+ amr = unlock_user_access();
__asm__ __volatile__ (
PPC_ATOMIC_ENTRY_BARRIER
"1: lwarx %1,0,%3 # futex_atomic_cmpxchg_inatomic\n\
@@ -95,6 +100,7 @@ futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
: "cc", "memory");
*uval = prev;
+ lock_user_access(amr);
return ret;
}
Wrap the futex operations in KHRAP locks and unlocks.

Signed-off-by: Russell Currey <ruscur@russell.cc>
---
 arch/powerpc/include/asm/futex.h | 6 ++++++
 1 file changed, 6 insertions(+)