From 7158eae4a8c8f15261e50ecab8929050df0a0435 Mon Sep 17 00:00:00 2001
From: Ulrich Drepper
Date: Tue, 13 May 2003 21:14:28 +0000
Subject: Update.

2003-05-12  Steven Munroe

	* sysdeps/powerpc/bits/atomic.h
	(__arch_compare_and_exchange_bool_8_rel): Define.
	(__arch_compare_and_exchange_bool_16_rel): Define.
	(__ARCH_REL_INSTR): Define if not already defined.
	(__arch_atomic_exchange_and_add_32): Add "memory" to clobber list.
	(__arch_atomic_decrement_if_positive_32): Add "memory" to clobber list.
	(__arch_compare_and_exchange_val_32_acq): Remove release sync.
	(__arch_compare_and_exchange_val_32_rel): Define.
	(__arch_atomic_exchange_32): Remove.
	(__arch_atomic_exchange_32_acq): Define.
	(__arch_atomic_exchange_32_rel): Define.
	(atomic_compare_and_exchange_val_rel): Define.
	(atomic_exchange_acq): Use __arch_atomic_exchange_*_acq forms.
	(atomic_exchange_rel): Define.
	* sysdeps/powerpc/powerpc32/bits/atomic.h
	(__arch_compare_and_exchange_bool_32_acq): Remove release sync.
	(__arch_compare_and_exchange_bool_32_rel): Define.
	(__arch_compare_and_exchange_bool_64_rel): Define.
	(__arch_compare_and_exchange_val_64_rel): Define.
	(__arch_atomic_exchange_64): Remove.
	(__arch_atomic_exchange_64_acq): Define.
	(__arch_atomic_exchange_64_rel): Define.
	* sysdeps/powerpc/powerpc64/bits/atomic.h
	(__arch_compare_and_exchange_bool_32_rel): Define.
	(__arch_compare_and_exchange_bool_64_acq): Remove release sync.
	(__arch_compare_and_exchange_bool_64_rel): Define.
	(__arch_compare_and_exchange_val_64_acq): Remove release sync.
	(__arch_compare_and_exchange_val_64_rel): Define.
	(__arch_atomic_exchange_64): Remove.
	(__arch_atomic_exchange_64_acq): Define.
	(__arch_atomic_exchange_64_rel): Define.
	(__arch_atomic_exchange_and_add_64): Add "memory" to clobber list.
	(__arch_atomic_decrement_if_positive_64): Add "memory" to clobber list.
	[!UP] (__ARCH_REL_INSTR): Define as lwsync.
---
 sysdeps/powerpc/powerpc32/bits/atomic.h | 31 ++++++++++++++++++++++++++++---
 1 file changed, 28 insertions(+), 3 deletions(-)

diff --git a/sysdeps/powerpc/powerpc32/bits/atomic.h b/sysdeps/powerpc/powerpc32/bits/atomic.h
index 54caa45de1..4e2e24335d 100644
--- a/sysdeps/powerpc/powerpc32/bits/atomic.h
+++ b/sysdeps/powerpc/powerpc32/bits/atomic.h
@@ -24,10 +24,10 @@
  * (a load word and zero (high 32) form).  So powerpc64 has a slightly
  * different version in sysdeps/powerpc/powerpc64/bits/atomic.h.
  */
-# define __arch_compare_and_exchange_bool_32_acq(mem, newval, oldval) \
+# define __arch_compare_and_exchange_bool_32_acq(mem, newval, oldval)        \
 ({                                                                           \
   unsigned int __tmp;                                                        \
-  __asm __volatile (__ARCH_REL_INSTR "\n"                                    \
+  __asm __volatile (                                                         \
                     "1: lwarx   %0,0,%1\n"                                   \
                     "   subf.   %0,%2,%0\n"                                  \
                     "   bne     2f\n"                                        \
@@ -40,6 +40,22 @@
   __tmp != 0;                                                                \
 })
 
+# define __arch_compare_and_exchange_bool_32_rel(mem, newval, oldval)        \
+({                                                                           \
+  unsigned int __tmp;                                                        \
+  __asm __volatile (__ARCH_REL_INSTR "\n"                                    \
+                    "1: lwarx   %0,0,%1\n"                                   \
+                    "   subf.   %0,%2,%0\n"                                  \
+                    "   bne     2f\n"                                        \
+                    "   stwcx.  %3,0,%1\n"                                   \
+                    "   bne-    1b\n"                                        \
+                    "2: "                                                    \
+                    : "=&r" (__tmp)                                          \
+                    : "b" (mem), "r" (oldval), "r" (newval)                  \
+                    : "cr0", "memory");                                      \
+  __tmp != 0;                                                                \
+})
+
 /*
  * Powerpc32 processors don't implement the 64-bit (doubleword) forms of
  * load and reserve (ldarx) and store conditional (stdcx.) instructions.
@@ -50,8 +66,17 @@
 
 # define __arch_compare_and_exchange_val_64_acq(mem, newval, oldval) \
   (abort (), (__typeof (*mem)) 0)
+
+# define __arch_compare_and_exchange_bool_64_rel(mem, newval, oldval) \
+  (abort (), 0)
+
+# define __arch_compare_and_exchange_val_64_rel(mem, newval, oldval) \
+  (abort (), (__typeof (*mem)) 0)
+
+# define __arch_atomic_exchange_64_acq(mem, value) \
+    ({ abort (); (*mem) = (value); })
 
-# define __arch_atomic_exchange_64(mem, value) \
+# define __arch_atomic_exchange_64_rel(mem, value) \
     ({ abort (); (*mem) = (value); })
 
 # define __arch_atomic_exchange_and_add_64(mem, value) \
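The core of the change is splitting each primitive into an _acq and an _rel form so the memory barrier lands on the correct side of the load-and-reserve loop: acquire variants put the barrier after the loop, release variants put __ARCH_REL_INSTR before it. The sketch below is illustrative only, not code from the patch: two hypothetical standalone functions modelled on the 32-bit exchange macros that this ChangeLog adds to sysdeps/powerpc/bits/atomic.h, with "sync" standing in for the release instruction ("lwsync" when !UP, per the ChangeLog).

/* Illustrative sketch only: where the acquire vs. release barriers sit
   in an lwarx/stwcx. exchange loop on powerpc32.  The function names are
   hypothetical; the real code is the __arch_atomic_exchange_32_acq/_rel
   macro pair described in the ChangeLog above.  */

static inline unsigned int
exchange_32_acq_sketch (unsigned int *mem, unsigned int value)
{
  unsigned int old;
  __asm __volatile ("1: lwarx   %0,0,%2\n"  /* load word and reserve        */
                    "   stwcx.  %3,0,%2\n"  /* store if reservation held    */
                    "   bne-    1b\n"       /* retry if reservation lost    */
                    "   isync"              /* acquire barrier AFTER loop   */
                    : "=&r" (old), "=m" (*mem)
                    : "b" (mem), "r" (value), "m" (*mem)
                    : "cr0", "memory");
  return old;
}

static inline unsigned int
exchange_32_rel_sketch (unsigned int *mem, unsigned int value)
{
  unsigned int old;
  __asm __volatile ("sync\n"                /* release barrier BEFORE loop
                                               (lwsync on !UP builds)       */
                    "1: lwarx   %0,0,%2\n"
                    "   stwcx.  %3,0,%2\n"
                    "   bne-    1b"
                    : "=&r" (old), "=m" (*mem)
                    : "b" (mem), "r" (value), "m" (*mem)
                    : "cr0", "memory");
  return old;
}

The same placement rule explains the two compare-and-exchange hunks in this file: the _acq variant dropped the leading __ARCH_REL_INSTR it never needed, while the new _rel variant carries it and omits any trailing acquire instruction.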
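For context on how the acquire/release pairing is meant to be consumed, here is a hedged usage sketch. It uses only the atomic_exchange_acq and atomic_exchange_rel names that appear in the ChangeLog (exposed through glibc's internal <atomic.h> wrapper); the spin_lock/spin_unlock helpers and the lock_word variable are hypothetical.

#include <atomic.h>   /* glibc-internal header layered over bits/atomic.h */

static int lock_word;  /* hypothetical: 0 = free, 1 = held */

static void
spin_lock (void)
{
  /* Acquire form: later loads/stores cannot move above taking the lock.  */
  while (atomic_exchange_acq (&lock_word, 1) != 0)
    ;  /* spin */
}

static void
spin_unlock (void)
{
  /* Release form: earlier stores are visible before the lock reads free.  */
  atomic_exchange_rel (&lock_word, 0);
}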