
[3/3] Implemented testandset() for Cortex-M3

Message ID 1414832419.11706.10.camel@mehome
State New

Commit Message

sposelenov@emcraft.com Nov. 1, 2014, 9 a.m. UTC
commit 683a77455f2226419fe688dd2104ef1a391bb3b2
Author: Sergei Poselenov <sposelenov@emcraft.com>
Date:   Sun Oct 26 14:54:12 2014 +0400

    Implemented testandset() for Cortex-M3
    
    Signed-off-by: Sergei Poselenov <sposelenov@emcraft.com>

Comments

Bernhard Reutner-Fischer Nov. 13, 2014, 7 p.m. UTC | #1
On 1 November 2014 10:00, Sergei Poselenov <sposelenov@emcraft.com> wrote:
> commit 683a77455f2226419fe688dd2104ef1a391bb3b2
> Author: Sergei Poselenov <sposelenov@emcraft.com>
> Date:   Sun Oct 26 14:54:12 2014 +0400
>
>     Implemented testandset() for Cortex-M3

Don't we have some CAS support in libc/sysdeps/linux/arm/bits/atomic.h
that should have been used?
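
Something along the lines of the sketch below, assuming the generic
atomic_compare_and_exchange_val_acq() from include/atomic.h is usable
from linuxthreads.old's pt-machine.h (untested, just to illustrate the
idea, not the submitted patch):

  #include <atomic.h>

  /* testandset() must return the previous lock value.  */
  PT_EI long int
  testandset (int *spinlock)
  {
    /* atomic_compare_and_exchange_val_acq() returns the old value of
       *spinlock: 0 means we just took the lock, non-zero means it was
       already held.  */
    return atomic_compare_and_exchange_val_acq (spinlock, 1, 0);
  }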

>
>     Signed-off-by: Sergei Poselenov <sposelenov@emcraft.com>
>
> diff --git a/extra/Configs/Config.arm b/extra/Configs/Config.arm
> index 0bb2971..5777d3b 100644
> --- a/extra/Configs/Config.arm
> +++ b/extra/Configs/Config.arm
> @@ -36,3 +36,10 @@ config USE_BX
>           Say 'y' to use BX to return from functions on your thumb-aware
>           processor. Say 'y' if you need to use interworking. Say 'n' if not.
>           It is safe to say 'y' even if you're not doing interworking.
> +
> +config USE_LDREXSTREX
> +       bool "Use load-store exclusive ASM ops (not supported in SmartFusion)"
> +       depends on COMPILE_IN_THUMB_MODE
> +       default y
> +       help
> +         Say 'y' to use LDREX/STREX ASM ops.

That sounds rather like you want to have it conditional on something like
__ARM_ARCH_6M__ or __ARM_ARCH >= 6, no?
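
I.e. key it off what the compiler says about the target, right in
pt-machine.h, roughly like this (a sketch only; HAVE_LDREX_STREX is a
made-up name, and the unified __ARM_ARCH macro needs a reasonably
recent gcc):

  /* LDREX/STREX are available in Thumb-2 (e.g. Cortex-M3) and in ARM
     state from ARMv6 on; ARMv6-M parts match neither test and keep
     using the existing code.  */
  #if defined(__thumb2__) || \
      (!defined(__thumb__) && defined(__ARM_ARCH) && __ARM_ARCH >= 6)
  # define HAVE_LDREX_STREX 1
  #endif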

thanks,

> diff --git a/libpthread/linuxthreads.old/sysdeps/arm/pt-machine.h b/libpthread/linuxthreads.old/sysdeps/arm/pt-machine.h
> index 583eb68..93804ba 100644
> --- a/libpthread/linuxthreads.old/sysdeps/arm/pt-machine.h
> +++ b/libpthread/linuxthreads.old/sysdeps/arm/pt-machine.h
> @@ -22,12 +22,50 @@
>  #ifndef _PT_MACHINE_H
>  #define _PT_MACHINE_H   1
>
> -#include <features.h>
> +#include <sys/syscall.h>
> +#include <unistd.h>
>
>  #ifndef PT_EI
>  # define PT_EI __extern_always_inline
>  #endif
>
> +#if defined(__thumb__)
> +#if defined(__USE_LDREXSTREX__)
> +PT_EI long int ldrex(int *spinlock)
> +{
> +       long int ret;
> +       __asm__ __volatile__(
> +               "ldrex %0, [%1]\n"
> +               : "=r"(ret)
> +               : "r"(spinlock) : "memory");
> +       return ret;
> +}
> +
> +PT_EI long int strex(int val, int *spinlock)
> +{
> +       long int ret;
> +       __asm__ __volatile__(
> +               "strex %0, %1, [%2]\n"
> +               : "=r"(ret)
> +               : "r" (val), "r"(spinlock) : "memory");
> +       return ret;
> +}
> +
> +/* Spinlock implementation; required.  */
> +PT_EI long int
> +testandset (int *spinlock)
> +{
> +  register unsigned int ret;
> +
> +  do {
> +         ret = ldrex(spinlock);
> +  } while (strex(1, spinlock));
> +
> +  return ret;
> +}
> +
> +#else /* __USE_LDREXSTREX__ */
> +
>  /* This will not work on ARM1 or ARM2 because SWP is lacking on those
>     machines.  Unfortunately we have no way to detect this at compile
>     time; let's hope nobody tries to use one.  */
> @@ -37,8 +75,6 @@ PT_EI long int testandset (int *spinlock);
>  PT_EI long int testandset (int *spinlock)
>  {
>    register unsigned int ret;
> -
> -#if defined(__thumb__)
>    void *pc;
>    __asm__ __volatile__(
>         ".align 0\n"
> @@ -51,15 +87,21 @@ PT_EI long int testandset (int *spinlock)
>         "\t.force_thumb"
>         : "=r"(ret), "=r"(pc)
>         : "0"(1), "r"(spinlock));
> -#else
> +  return ret;
> +}
> +#endif
> +#else /* __thumb__ */
> +
> +PT_EI long int testandset (int *spinlock);
> +PT_EI long int testandset (int *spinlock)
> +{
> +  register unsigned int ret;
>    __asm__ __volatile__("swp %0, %1, [%2]"
>                        : "=r"(ret)
>                        : "0"(1), "r"(spinlock));
> -#endif
> -
>    return ret;
>  }
> -
> +#endif
>
>  /* Get some notion of the current stack.  Need not be exactly the top
>     of the stack, just something somewhere in the current frame.  */
>
>

Patch

diff --git a/extra/Configs/Config.arm b/extra/Configs/Config.arm
index 0bb2971..5777d3b 100644
--- a/extra/Configs/Config.arm
+++ b/extra/Configs/Config.arm
@@ -36,3 +36,10 @@  config USE_BX
 	  Say 'y' to use BX to return from functions on your thumb-aware
 	  processor. Say 'y' if you need to use interworking. Say 'n' if not.
 	  It is safe to say 'y' even if you're not doing interworking.
+
+config USE_LDREXSTREX
+	bool "Use load-store exclusive ASM ops (not supported in SmartFusion)"
+	depends on COMPILE_IN_THUMB_MODE
+	default y
+	help
+	  Say 'y' to use LDREX/STREX ASM ops.
diff --git a/libpthread/linuxthreads.old/sysdeps/arm/pt-machine.h b/libpthread/linuxthreads.old/sysdeps/arm/pt-machine.h
index 583eb68..93804ba 100644
--- a/libpthread/linuxthreads.old/sysdeps/arm/pt-machine.h
+++ b/libpthread/linuxthreads.old/sysdeps/arm/pt-machine.h
@@ -22,12 +22,50 @@ 
 #ifndef _PT_MACHINE_H
 #define _PT_MACHINE_H   1
 
-#include <features.h>
+#include <sys/syscall.h>
+#include <unistd.h>
 
 #ifndef PT_EI
 # define PT_EI __extern_always_inline
 #endif
 
+#if defined(__thumb__)
+#if defined(__USE_LDREXSTREX__)
+PT_EI long int ldrex(int *spinlock)
+{
+	long int ret;
+	__asm__ __volatile__(
+		"ldrex %0, [%1]\n"
+		: "=r"(ret)
+		: "r"(spinlock) : "memory");
+	return ret;
+}
+
+PT_EI long int strex(int val, int *spinlock)
+{
+	long int ret;
+	__asm__ __volatile__(
+		"strex %0, %1, [%2]\n"
+		: "=r"(ret)
+		: "r" (val), "r"(spinlock) : "memory");
+	return ret;
+}
+
+/* Spinlock implementation; required.  */
+PT_EI long int
+testandset (int *spinlock)
+{
+  register unsigned int ret;
+
+  do {
+	  ret = ldrex(spinlock);
+  } while (strex(1, spinlock));
+
+  return ret;
+}
+
+#else /* __USE_LDREXSTREX__ */
+
 /* This will not work on ARM1 or ARM2 because SWP is lacking on those
    machines.  Unfortunately we have no way to detect this at compile
    time; let's hope nobody tries to use one.  */
@@ -37,8 +75,6 @@  PT_EI long int testandset (int *spinlock);
 PT_EI long int testandset (int *spinlock)
 {
   register unsigned int ret;
-
-#if defined(__thumb__)
   void *pc;
   __asm__ __volatile__(
 	".align 0\n"
@@ -51,15 +87,21 @@  PT_EI long int testandset (int *spinlock)
 	"\t.force_thumb"
 	: "=r"(ret), "=r"(pc)
 	: "0"(1), "r"(spinlock));
-#else
+  return ret;
+}
+#endif
+#else /* __thumb__ */
+
+PT_EI long int testandset (int *spinlock);
+PT_EI long int testandset (int *spinlock)
+{
+  register unsigned int ret;
   __asm__ __volatile__("swp %0, %1, [%2]"
 		       : "=r"(ret)
 		       : "0"(1), "r"(spinlock));
-#endif
-
   return ret;
 }
-
+#endif
 
 /* Get some notion of the current stack.  Need not be exactly the top
    of the stack, just something somewhere in the current frame.  */