diff --git a/arch/arm/include/asm/atomic.h b/arch/arm/include/asm/atomic.h
index da1c77d39327963ab10e633aeb8809aac7da2dec..55ffc3b850f43e17b4337c83af16c1c0bdf81afc 100644
--- a/arch/arm/include/asm/atomic.h
+++ b/arch/arm/include/asm/atomic.h
@@ -12,6 +12,7 @@
 #define __ASM_ARM_ATOMIC_H
 
 #include <linux/compiler.h>
+#include <linux/prefetch.h>
 #include <linux/types.h>
 #include <linux/irqflags.h>
 #include <asm/barrier.h>
@@ -41,6 +42,7 @@ static inline void atomic_add(int i, atomic_t *v)
 	unsigned long tmp;
 	int result;
 
+	prefetchw(&v->counter);
 	__asm__ __volatile__("@ atomic_add\n"
 "1:	ldrex	%0, [%3]\n"
 "	add	%0, %0, %4\n"
@@ -79,6 +81,7 @@ static inline void atomic_sub(int i, atomic_t *v)
 	unsigned long tmp;
 	int result;
 
+	prefetchw(&v->counter);
 	__asm__ __volatile__("@ atomic_sub\n"
 "1:	ldrex	%0, [%3]\n"
 "	sub	%0, %0, %4\n"
@@ -138,6 +141,7 @@ static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
 {
 	unsigned long tmp, tmp2;
 
+	prefetchw(addr);
 	__asm__ __volatile__("@ atomic_clear_mask\n"
 "1:	ldrex	%0, [%3]\n"
 "	bic	%0, %0, %4\n"
@@ -283,6 +287,7 @@ static inline void atomic64_set(atomic64_t *v, u64 i)
 {
 	u64 tmp;
 
+	prefetchw(&v->counter);
 	__asm__ __volatile__("@ atomic64_set\n"
 "1:	ldrexd	%0, %H0, [%2]\n"
 "	strexd	%0, %3, %H3, [%2]\n"
@@ -299,6 +304,7 @@ static inline void atomic64_add(u64 i, atomic64_t *v)
 	u64 result;
 	unsigned long tmp;
 
+	prefetchw(&v->counter);
 	__asm__ __volatile__("@ atomic64_add\n"
 "1:	ldrexd	%0, %H0, [%3]\n"
 "	adds	%0, %0, %4\n"
@@ -339,6 +345,7 @@ static inline void atomic64_sub(u64 i, atomic64_t *v)
 	u64 result;
 	unsigned long tmp;
 
+	prefetchw(&v->counter);
 	__asm__ __volatile__("@ atomic64_sub\n"
 "1:	ldrexd	%0, %H0, [%3]\n"
 "	subs	%0, %0, %4\n"
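
Note: the LL/SC sequences above fail and retry whenever another core takes the
cache line between the ldrex and the strex. Without the hint, the ldrex may
bring the line in a shared state, and the strex must then upgrade it to
exclusive before it can succeed. prefetchw() (from <linux/prefetch.h>) requests
the line with intent to write up front -- PLDW on ARMv7 cores with the
multiprocessing extensions -- so the first strex is more likely to succeed.

Below is a minimal standalone sketch of the same pattern, for illustration
only: atomic_add_sketch is a hypothetical name, and the GCC/Clang builtin
__builtin_prefetch(p, 1) stands in for the kernel's prefetchw(). It assumes
an ARMv7 target; it is not the kernel's code.

	static inline void atomic_add_sketch(int i, int *counter)
	{
		unsigned long tmp;
		int result;

		/* Hint a write-intent prefetch of the line (1 = prepare
		 * for write) before entering the exclusive retry loop. */
		__builtin_prefetch(counter, 1);

		__asm__ __volatile__(
		"1:	ldrex	%0, [%3]\n"	/* load-exclusive the counter   */
		"	add	%0, %0, %4\n"	/* compute the new value        */
		"	strex	%1, %0, [%3]\n"	/* try to store; %1=0 on success */
		"	teq	%1, #0\n"
		"	bne	1b"		/* lost exclusivity: retry      */
		: "=&r" (result), "=&r" (tmp), "+Qo" (*counter)
		: "r" (counter), "Ir" (i)
		: "cc");
	}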