Diffstat (limited to 'arch/arm/include/asm/atomic.h')
-rw-r--r--  arch/arm/include/asm/atomic.h | 9 +++++++++
1 file changed, 9 insertions(+), 0 deletions(-)
diff --git a/arch/arm/include/asm/atomic.h b/arch/arm/include/asm/atomic.h
index 62d2cb53b069..6e410090896e 100644
--- a/arch/arm/include/asm/atomic.h
+++ b/arch/arm/include/asm/atomic.h
@@ -60,6 +60,7 @@ static inline int atomic_add_return(int i, atomic_t *v)
         int result;
 
         smp_mb();
+        prefetchw(&v->counter);
 
         __asm__ __volatile__("@ atomic_add_return\n"
 "1:     ldrex   %0, [%3]\n"
@@ -99,6 +100,7 @@ static inline int atomic_sub_return(int i, atomic_t *v)
         int result;
 
         smp_mb();
+        prefetchw(&v->counter);
 
         __asm__ __volatile__("@ atomic_sub_return\n"
 "1:     ldrex   %0, [%3]\n"
@@ -121,6 +123,7 @@ static inline int atomic_cmpxchg(atomic_t *ptr, int old, int new)
         unsigned long res;
 
         smp_mb();
+        prefetchw(&ptr->counter);
 
         do {
                 __asm__ __volatile__("@ atomic_cmpxchg\n"
@@ -299,6 +302,7 @@ static inline long long atomic64_add_return(long long i, atomic64_t *v)
         unsigned long tmp;
 
         smp_mb();
+        prefetchw(&v->counter);
 
         __asm__ __volatile__("@ atomic64_add_return\n"
 "1:     ldrexd  %0, %H0, [%3]\n"
@@ -340,6 +344,7 @@ static inline long long atomic64_sub_return(long long i, atomic64_t *v)
         unsigned long tmp;
 
         smp_mb();
+        prefetchw(&v->counter);
 
         __asm__ __volatile__("@ atomic64_sub_return\n"
 "1:     ldrexd  %0, %H0, [%3]\n"
@@ -364,6 +369,7 @@ static inline long long atomic64_cmpxchg(atomic64_t *ptr, long long old,
         unsigned long res;
 
         smp_mb();
+        prefetchw(&ptr->counter);
 
         do {
                 __asm__ __volatile__("@ atomic64_cmpxchg\n"
@@ -388,6 +394,7 @@ static inline long long atomic64_xchg(atomic64_t *ptr, long long new)
         unsigned long tmp;
 
         smp_mb();
+        prefetchw(&ptr->counter);
 
         __asm__ __volatile__("@ atomic64_xchg\n"
 "1:     ldrexd  %0, %H0, [%3]\n"
@@ -409,6 +416,7 @@ static inline long long atomic64_dec_if_positive(atomic64_t *v)
         unsigned long tmp;
 
         smp_mb();
+        prefetchw(&v->counter);
 
         __asm__ __volatile__("@ atomic64_dec_if_positive\n"
 "1:     ldrexd  %0, %H0, [%3]\n"
@@ -436,6 +444,7 @@ static inline int atomic64_add_unless(atomic64_t *v, long long a, long long u)
         int ret = 1;
 
         smp_mb();
+        prefetchw(&v->counter);
 
         __asm__ __volatile__("@ atomic64_add_unless\n"
 "1:     ldrexd  %0, %H0, [%4]\n"