[Xen-devel] [PATCH 17/45] xen: arm64: xchg and cmpxchg

Move the 32-bit xchg and cmpxchg implementations out of the common
asm-arm/system.h and into arm32/system.h, and provide AArch64
implementations in arm64/system.h built on the exclusive-access
instructions (ldxr/stxr, and the acquire/release forms ldaxr/stlxr).

Signed-off-by: Ian Campbell <ian.campbell@xxxxxxxxxx>
---
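Notes (below the "---", so not part of the commit message): a quick,
illustrative sketch of how callers use these primitives.  The lock
word "owner" is hypothetical; only cmpxchg() and xchg() refer to the
definitions added by this patch:

    static unsigned long owner;     /* hypothetical lock word */

    /* Attempt to take the lock: succeeds iff owner was 0. */
    if ( cmpxchg(&owner, 0UL, 1UL) == 0UL )
    {
        /* ... critical section ... */

        /* Release: xchg is ordered on both architectures (explicit
         * smp_mb() on arm32, ldaxr/stlxr on arm64). */
        xchg(&owner, 0UL);
    }

Both macros dispatch on sizeof(*(ptr)), so the one spelling covers
every width the architecture implements.
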
 xen/include/asm-arm/arm32/system.h |  115 ++++++++++++++++++++++++++
 xen/include/asm-arm/arm64/system.h |  155 ++++++++++++++++++++++++++++++++++++
 xen/include/asm-arm/system.h       |  114 --------------------------
 3 files changed, 270 insertions(+), 114 deletions(-)

diff --git a/xen/include/asm-arm/arm32/system.h b/xen/include/asm-arm/arm32/system.h
index 91098a0..9dbe8e3 100644
--- a/xen/include/asm-arm/arm32/system.h
+++ b/xen/include/asm-arm/arm32/system.h
@@ -18,6 +18,121 @@
 #define smp_rmb()       dmb()
 #define smp_wmb()       dmb()
 
+extern void __bad_xchg(volatile void *, int);
+
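+/*
+ * ldrex/strex are ARMv7's load-exclusive/store-exclusive pair: strex
+ * writes 0 to its status operand only if the store succeeded, so the
+ * loop below retries until the swap lands atomically.  The smp_mb()
+ * on either side gives xchg full barrier semantics.  __bad_xchg is
+ * deliberately left undefined, so an unsupported size turns into a
+ * link-time error.
+ */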
+static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
+{
+        unsigned long ret;
+        unsigned int tmp;
+
+        smp_mb();
+
+        switch (size) {
+        case 1:
+                asm volatile("@ __xchg1\n"
+                "1:     ldrexb  %0, [%3]\n"
+                "       strexb  %1, %2, [%3]\n"
+                "       teq     %1, #0\n"
+                "       bne     1b"
+                        : "=&r" (ret), "=&r" (tmp)
+                        : "r" (x), "r" (ptr)
+                        : "memory", "cc");
+                break;
+        case 4:
+                asm volatile("@ __xchg4\n"
+                "1:     ldrex   %0, [%3]\n"
+                "       strex   %1, %2, [%3]\n"
+                "       teq     %1, #0\n"
+                "       bne     1b"
+                        : "=&r" (ret), "=&r" (tmp)
+                        : "r" (x), "r" (ptr)
+                        : "memory", "cc");
+                break;
+        default:
+                __bad_xchg(ptr, size), ret = 0;
+                break;
+        }
+        smp_mb();
+
+        return ret;
+}
+
+/*
+ * Atomic compare and exchange.  Compare OLD with MEM, if identical,
+ * store NEW in MEM.  Return the initial value in MEM.  Success is
+ * indicated by comparing RETURN with OLD.
+ */
+
+extern void __bad_cmpxchg(volatile void *ptr, int size);
+
+static always_inline unsigned long __cmpxchg(
+    volatile void *ptr, unsigned long old, unsigned long new, int size)
+{
+    unsigned long oldval, res;
+
+    switch (size) {
+    case 1:
+        do {
+            asm volatile("@ __cmpxchg1\n"
+                         "       ldrexb  %1, [%2]\n"
+                         "       mov     %0, #0\n"
+                         "       teq     %1, %3\n"
+                         "       strexbeq %0, %4, [%2]\n"
+                         : "=&r" (res), "=&r" (oldval)
+                         : "r" (ptr), "Ir" (old), "r" (new)
+                         : "memory", "cc");
+        } while (res);
+        break;
+    case 2:
+        do {
+            asm volatile("@ __cmpxchg2\n"
+                         "       ldrexh  %1, [%2]\n"
+                         "       mov     %0, #0\n"
+                         "       teq     %1, %3\n"
+                         "       strexheq %0, %4, [%2]\n"
+                         : "=&r" (res), "=&r" (oldval)
+                         : "r" (ptr), "Ir" (old), "r" (new)
+                         : "memory", "cc");
+        } while (res);
+        break;
+    case 4:
+        do {
+            asm volatile("@ __cmpxchg4\n"
+                         "       ldrex   %1, [%2]\n"
+                         "       mov     %0, #0\n"
+                         "       teq     %1, %3\n"
+                         "       strexeq %0, %4, [%2]\n"
+                         : "=&r" (res), "=&r" (oldval)
+                         : "r" (ptr), "Ir" (old), "r" (new)
+                         : "memory", "cc");
+        } while (res);
+        break;
+#if 0
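+    /*
+     * Disabled: ldrexd/strexd transfer an even/odd register pair,
+     * which the single-register operands below do not express.
+     */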
+    case 8:
+        do {
+            asm volatile("@ __cmpxchg8\n"
+                         "       ldrexd   %1, [%2]\n"
+                         "       mov      %0, #0\n"
+                         "       teq      %1, %3\n"
+                         "       strexdeq %0, %4, [%2]\n"
+                         : "=&r" (res), "=&r" (oldval)
+                         : "r" (ptr), "Ir" (old), "r" (new)
+                         : "memory", "cc");
+        } while (res);
+        break;
+#endif
+    default:
+        __bad_cmpxchg(ptr, size);
+        oldval = 0;
+    }
+
+    return oldval;
+}
+
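+/*
+ * cmpxchg dispatches on sizeof(*(ptr)) and casts back to the
+ * pointed-to type.  Note that, unlike the arm64 side, no explicit
+ * barriers are placed around the exclusives here.
+ */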
+#define cmpxchg(ptr,o,n)                                                \
+    ((__typeof__(*(ptr)))__cmpxchg((ptr),(unsigned long)(o),            \
+                                   (unsigned long)(n),sizeof(*(ptr))))
+
 #endif
 /*
  * Local variables:
diff --git a/xen/include/asm-arm/arm64/system.h b/xen/include/asm-arm/arm64/system.h
index 33c031d..6fd26f8 100644
--- a/xen/include/asm-arm/arm64/system.h
+++ b/xen/include/asm-arm/arm64/system.h
@@ -17,6 +17,161 @@
 #define smp_rmb()       asm volatile("dmb ishld" : : : "memory")
 #define smp_wmb()       asm volatile("dmb ishst" : : : "memory")
 
+
+extern void __bad_xchg(volatile void *, int);
+
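+/*
+ * ldaxr/stlxr are the AArch64 exclusives with acquire/release
+ * semantics, so the retry loops below need no explicit dmb: the
+ * instructions themselves order the exchange.
+ */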
+static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
+{
+        unsigned long ret, tmp;
+
+        switch (size) {
+        case 1:
+                asm volatile("//        __xchg1\n"
+                "1:     ldaxrb  %w0, [%3]\n"
+                "       stlxrb  %w1, %w2, [%3]\n"
+                "       cbnz    %w1, 1b\n"
+                        : "=&r" (ret), "=&r" (tmp)
+                        : "r" (x), "r" (ptr)
+                        : "memory", "cc");
+                break;
+        case 2:
+                asm volatile("//        __xchg2\n"
+                "1:     ldaxrh  %w0, [%3]\n"
+                "       stlxrh  %w1, %w2, [%3]\n"
+                "       cbnz    %w1, 1b\n"
+                        : "=&r" (ret), "=&r" (tmp)
+                        : "r" (x), "r" (ptr)
+                        : "memory", "cc");
+                break;
+        case 4:
+                asm volatile("//        __xchg4\n"
+                "1:     ldaxr   %w0, [%3]\n"
+                "       stlxr   %w1, %w2, [%3]\n"
+                "       cbnz    %w1, 1b\n"
+                        : "=&r" (ret), "=&r" (tmp)
+                        : "r" (x), "r" (ptr)
+                        : "memory", "cc");
+                break;
+        case 8:
+                asm volatile("//        __xchg8\n"
+                "1:     ldaxr   %0, [%3]\n"
+                "       stlxr   %w1, %2, [%3]\n"
+                "       cbnz    %w1, 1b\n"
+                        : "=&r" (ret), "=&r" (tmp)
+                        : "r" (x), "r" (ptr)
+                        : "memory", "cc");
+                break;
+        default:
+                __bad_xchg(ptr, size), ret = 0;
+                break;
+        }
+
+        return ret;
+}
+
+#define xchg(ptr,x) \
+        ((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))
+
+extern void __bad_cmpxchg(volatile void *ptr, int size);
+
+static inline unsigned long __cmpxchg(volatile void *ptr, unsigned long old,
+                                      unsigned long new, int size)
+{
+        unsigned long oldval = 0, res;
+
+        switch (size) {
+        case 1:
+                do {
+                        asm volatile("// __cmpxchg1\n"
+                        "       ldxrb   %w1, [%2]\n"
+                        "       mov     %w0, #0\n"
+                        "       cmp     %w1, %w3\n"
+                        "       b.ne    1f\n"
+                        "       stxrb   %w0, %w4, [%2]\n"
+                        "1:\n"
+                                : "=&r" (res), "=&r" (oldval)
+                                : "r" (ptr), "Ir" (old), "r" (new)
+                                : "memory", "cc");
+                } while (res);
+                break;
+
+        case 2:
+                do {
+                        asm volatile("// __cmpxchg2\n"
+                        "       ldxrh   %w1, [%2]\n"
+                        "       mov     %w0, #0\n"
+                        "       cmp     %w1, %w3\n"
+                        "       b.ne    1f\n"
+                        "       stxrh   %w0, %w4, [%2]\n"
+                        "1:\n"
+                                : "=&r" (res), "=&r" (oldval)
+                                : "r" (ptr), "Ir" (old), "r" (new)
+                                : "memory", "cc");
+                } while (res);
+                break;
+
+        case 4:
+                do {
+                        asm volatile("// __cmpxchg4\n"
+                        "       ldxr    %w1, [%2]\n"
+                        "       mov     %w0, #0\n"
+                        "       cmp     %w1, %w3\n"
+                        "       b.ne    1f\n"
+                        "       stxr    %w0, %w4, [%2]\n"
+                        "1:\n"
+                                : "=&r" (res), "=&r" (oldval)
+                                : "r" (ptr), "Ir" (old), "r" (new)
+                                : "memory", "cc");
+                } while (res);
+                break;
+
+        case 8:
+                do {
+                        asm volatile("// __cmpxchg8\n"
+                        "       ldxr    %1, [%2]\n"
+                        "       mov     %w0, #0\n"
+                        "       cmp     %1, %3\n"
+                        "       b.ne    1f\n"
+                        "       stxr    %w0, %4, [%2]\n"
+                        "1:\n"
+                                : "=&r" (res), "=&r" (oldval)
+                                : "r" (ptr), "Ir" (old), "r" (new)
+                                : "memory", "cc");
+                } while (res);
+                break;
+
+        default:
+                __bad_cmpxchg(ptr, size);
+                oldval = 0;
+        }
+
+        return oldval;
+}
+
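+/*
+ * __cmpxchg above uses the relaxed ldxr/stxr exclusives.
+ * __cmpxchg_mb brackets it with smp_mb() so that cmpxchg below is a
+ * full barrier, while cmpxchg_local remains unordered.
+ */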
+static inline unsigned long __cmpxchg_mb(volatile void *ptr, unsigned long old,
+                                         unsigned long new, int size)
+{
+        unsigned long ret;
+
+        smp_mb();
+        ret = __cmpxchg(ptr, old, new, size);
+        smp_mb();
+
+        return ret;
+}
+
+#define cmpxchg(ptr,o,n)                                                \
+        ((__typeof__(*(ptr)))__cmpxchg_mb((ptr),                        \
+                                          (unsigned long)(o),           \
+                                          (unsigned long)(n),           \
+                                          sizeof(*(ptr))))
+
+#define cmpxchg_local(ptr,o,n)                                          \
+        ((__typeof__(*(ptr)))__cmpxchg((ptr),                           \
+                                       (unsigned long)(o),              \
+                                       (unsigned long)(n),              \
+                                       sizeof(*(ptr))))
+
 #endif
 /*
  * Local variables:
diff --git a/xen/include/asm-arm/system.h b/xen/include/asm-arm/system.h
index 8b4c97a..e4cb99c 100644
--- a/xen/include/asm-arm/system.h
+++ b/xen/include/asm-arm/system.h
@@ -29,120 +29,6 @@
 # error "unknown ARM variant"
 #endif
 
-extern void __bad_xchg(volatile void *, int);
-
-static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
-{
-        unsigned long ret;
-        unsigned int tmp;
-
-        smp_mb();
-
-        switch (size) {
-        case 1:
-                asm volatile("@ __xchg1\n"
-                "1:     ldrexb  %0, [%3]\n"
-                "       strexb  %1, %2, [%3]\n"
-                "       teq     %1, #0\n"
-                "       bne     1b"
-                        : "=&r" (ret), "=&r" (tmp)
-                        : "r" (x), "r" (ptr)
-                        : "memory", "cc");
-                break;
-        case 4:
-                asm volatile("@ __xchg4\n"
-                "1:     ldrex   %0, [%3]\n"
-                "       strex   %1, %2, [%3]\n"
-                "       teq     %1, #0\n"
-                "       bne     1b"
-                        : "=&r" (ret), "=&r" (tmp)
-                        : "r" (x), "r" (ptr)
-                        : "memory", "cc");
-                break;
-        default:
-                __bad_xchg(ptr, size), ret = 0;
-                break;
-        }
-        smp_mb();
-
-        return ret;
-}
-
-/*
- * Atomic compare and exchange.  Compare OLD with MEM, if identical,
- * store NEW in MEM.  Return the initial value in MEM.  Success is
- * indicated by comparing RETURN with OLD.
- */
-
-extern void __bad_cmpxchg(volatile void *ptr, int size);
-
-static always_inline unsigned long __cmpxchg(
-    volatile void *ptr, unsigned long old, unsigned long new, int size)
-{
-    unsigned long /*long*/ oldval, res;
-
-    switch (size) {
-    case 1:
-        do {
-            asm volatile("@ __cmpxchg1\n"
-                         "       ldrexb  %1, [%2]\n"
-                         "       mov     %0, #0\n"
-                         "       teq     %1, %3\n"
-                         "       strexbeq %0, %4, [%2]\n"
-                         : "=&r" (res), "=&r" (oldval)
-                         : "r" (ptr), "Ir" (old), "r" (new)
-                         : "memory", "cc");
-        } while (res);
-        break;
-    case 2:
-        do {
-            asm volatile("@ __cmpxchg2\n"
-                         "       ldrexh  %1, [%2]\n"
-                         "       mov     %0, #0\n"
-                         "       teq     %1, %3\n"
-                         "       strexheq %0, %4, [%2]\n"
-                         : "=&r" (res), "=&r" (oldval)
-                         : "r" (ptr), "Ir" (old), "r" (new)
-                         : "memory", "cc");
-        } while (res);
-        break;
-    case 4:
-        do {
-            asm volatile("@ __cmpxchg4\n"
-                         "       ldrex   %1, [%2]\n"
-                         "       mov     %0, #0\n"
-                         "       teq     %1, %3\n"
-                         "       strexeq %0, %4, [%2]\n"
-                         : "=&r" (res), "=&r" (oldval)
-                         : "r" (ptr), "Ir" (old), "r" (new)
-                         : "memory", "cc");
-        } while (res);
-        break;
-#if 0
-    case 8:
-        do {
-            asm volatile("@ __cmpxchg8\n"
-                         "       ldrexd   %1, [%2]\n"
-                         "       mov      %0, #0\n"
-                         "       teq      %1, %3\n"
-                         "       strexdeq %0, %4, [%2]\n"
-                         : "=&r" (res), "=&r" (oldval)
-                         : "r" (ptr), "Ir" (old), "r" (new)
-                         : "memory", "cc");
-        } while (res);
-        break;
-#endif
-    default:
-        __bad_cmpxchg(ptr, size);
-        oldval = 0;
-    }
-
-    return oldval;
-}
-#define cmpxchg(ptr,o,n)                                                \
-    ((__typeof__(*(ptr)))__cmpxchg((ptr),(unsigned long)(o),            \
-                                   (unsigned long)(n),sizeof(*(ptr))))
-
 #define local_irq_disable() asm volatile ( "cpsid i @ local_irq_disable\n" : : : "cc" )
 #define local_irq_enable()  asm volatile ( "cpsie i @ local_irq_enable\n" : : : "cc" )
 
-- 
1.7.2.5

