Commit 9560782f authored by Russell King, committed by Russell King

[PATCH] ARM SMP: Use exclusive load/store for __xchg

Signed-off-by: Russell King <rmk+kernel@arm.linux.org.uk>
Parent 6b6a93c6
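
For context, callers do not invoke __xchg() directly; the same header routes them through the xchg() macro, which supplies sizeof(*(ptr)) as the size argument tested by the switch in the diff below. A sketch of that wrapper, quoted from memory of same-era kernels and therefore an approximation rather than part of this patch:

#define xchg(ptr,x) \
	((__typeof__(*(ptr)))__xchg((unsigned long)(x),(ptr),sizeof(*(ptr))))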
@@ -323,12 +323,8 @@ do { \
  * NOTE that this solution won't work on an SMP system, so explicitly
  * forbid it here.
  */
-#ifdef CONFIG_SMP
-#error SMP is not supported on SA1100/SA110
-#else
 #define swp_is_buggy
 #endif
-#endif
 
 static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size)
 {
@@ -337,35 +333,68 @@ static inline unsigned long __xchg(unsigned long x, volatile void *ptr, int size
 #ifdef swp_is_buggy
 	unsigned long flags;
 #endif
+#if __LINUX_ARM_ARCH__ >= 6
+	unsigned int tmp;
+#endif
 
 	switch (size) {
-#ifdef swp_is_buggy
-	case 1:
-		local_irq_save(flags);
-		ret = *(volatile unsigned char *)ptr;
-		*(volatile unsigned char *)ptr = x;
-		local_irq_restore(flags);
-		break;
-
-	case 4:
-		local_irq_save(flags);
-		ret = *(volatile unsigned long *)ptr;
-		*(volatile unsigned long *)ptr = x;
-		local_irq_restore(flags);
-		break;
+#if __LINUX_ARM_ARCH__ >= 6
+	case 1:
+		asm volatile("@ __xchg1\n"
+		"1:	ldrexb	%0, [%3]\n"
+		"	strexb	%1, %2, [%3]\n"
+		"	teq	%1, #0\n"
+		"	bne	1b"
+			: "=&r" (ret), "=&r" (tmp)
+			: "r" (x), "r" (ptr)
+			: "memory", "cc");
+		break;
+	case 4:
+		asm volatile("@ __xchg4\n"
+		"1:	ldrex	%0, [%3]\n"
+		"	strex	%1, %2, [%3]\n"
+		"	teq	%1, #0\n"
+		"	bne	1b"
+			: "=&r" (ret), "=&r" (tmp)
+			: "r" (x), "r" (ptr)
+			: "memory", "cc");
+		break;
+#elif defined(swp_is_buggy)
+#ifdef CONFIG_SMP
+#error SMP is not supported on this platform
+#endif
+	case 1:
+		local_irq_save(flags);
+		ret = *(volatile unsigned char *)ptr;
+		*(volatile unsigned char *)ptr = x;
+		local_irq_restore(flags);
+		break;
+
+	case 4:
+		local_irq_save(flags);
+		ret = *(volatile unsigned long *)ptr;
+		*(volatile unsigned long *)ptr = x;
+		local_irq_restore(flags);
+		break;
 #else
-	case 1:	__asm__ __volatile__ ("swpb %0, %1, [%2]"
-			: "=&r" (ret)
-			: "r" (x), "r" (ptr)
-			: "memory", "cc");
-		break;
-	case 4:	__asm__ __volatile__ ("swp %0, %1, [%2]"
-			: "=&r" (ret)
-			: "r" (x), "r" (ptr)
-			: "memory", "cc");
-		break;
+	case 1:
+		asm volatile("@ __xchg1\n"
+		"	swpb	%0, %1, [%2]"
+			: "=&r" (ret)
+			: "r" (x), "r" (ptr)
+			: "memory", "cc");
+		break;
+	case 4:
+		asm volatile("@ __xchg4\n"
+		"	swp	%0, %1, [%2]"
+			: "=&r" (ret)
+			: "r" (x), "r" (ptr)
+			: "memory", "cc");
+		break;
 #endif
-	default: __bad_xchg(ptr, size), ret = 0;
+	default:
+		__bad_xchg(ptr, size), ret = 0;
+		break;
 	}
 
 	return ret;
...
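
The ldrexb/strexb and ldrex/strex pairs are ARMv6's load-linked/store-conditional primitives: ldrex loads the value and marks the address for exclusive monitoring, and strex stores only if nothing else has touched the location since, writing 0 to its status register on success, so the teq/bne loop retries until the exchange completes atomically. Below is a minimal user-space sketch of the __xchg4 path, not part of the patch: the name xchg_u32 is invented for illustration, and on non-ARMv6 hosts it falls back to the GCC/Clang __atomic_exchange_n builtin, which itself compiles to an equivalent exclusive load/store loop on ARMv6+ targets.

/* Hypothetical user-space analogue of the ARMv6 __xchg4 case above;
 * xchg_u32 is an illustrative name, not a kernel interface. */
#include <stdio.h>

static inline unsigned int xchg_u32(volatile unsigned int *ptr, unsigned int x)
{
#if defined(__arm__) && defined(__ARM_ARCH) && __ARM_ARCH >= 6
	unsigned int ret, tmp;

	asm volatile("@ xchg_u32\n"
	"1:	ldrex	%0, [%3]\n"	/* load and mark address exclusive */
	"	strex	%1, %2, [%3]\n"	/* store iff still exclusive; %1 = 0 on success */
	"	teq	%1, #0\n"
	"	bne	1b"		/* lost exclusivity: retry */
		: "=&r" (ret), "=&r" (tmp)
		: "r" (x), "r" (ptr)
		: "memory", "cc");
	return ret;
#else
	/* Portable fallback: the builtin emits an equivalent
	 * exclusive-access loop on ARMv6+ and works everywhere else. */
	return __atomic_exchange_n(ptr, x, __ATOMIC_SEQ_CST);
#endif
}

int main(void)
{
	volatile unsigned int lock = 0;
	unsigned int old = xchg_u32(&lock, 1);	/* test-and-set style acquire */

	printf("old=%u new=%u\n", old, (unsigned int)lock);	/* prints old=0 new=1 */
	return 0;
}

This also shows why the patch restructures the preprocessor guards the way it does: the local_irq_save() emulation in the swp_is_buggy branch serializes only against the local CPU, so it remains fenced off with an #error under CONFIG_SMP, while the exclusive monitor keeps strex honest across CPUs and makes the ARMv6 path safe on SMP.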