diff options
author    Roland McGrath <roland@gnu.org>  2003-03-26 04:02:03 +0000
committer Roland McGrath <roland@gnu.org>  2003-03-26 04:02:03 +0000
commit    859e708f0e44d50deff617c7fd939f4fba295afb (patch)
tree      c02276928e6c984391f089af6b54f81220165d0e /sysdeps
parent    dd410d4d4585883d02aa497f4227551c5c4cf811 (diff)
download  glibc-859e708f0e44d50deff617c7fd939f4fba295afb.tar.gz
* csu/tst-atomic.c (do_test): Add some new
atomic_compare_and_exchange_val_acq, atomic_add_zero,
atomic_compare_and_exchange_bool_acq and atomic_add_negative tests.
* include/atomic.h (atomic_add_negative, atomic_add_zero):
Prefix local variable so that it doesn't clash with the one
in atomic_exchange_and_add.
* sysdeps/ia64/bits/atomic.h (atomic_exchange): Fix for long/void *
pointers.
(atomic_exchange_and_add): Implement using __sync_fetch_and_add_?i.
* sysdeps/powerpc/bits/atomic.h (atomic_exchange_and_add): Force
value into register.
* sysdeps/s390/bits/atomic.h (__arch_compare_and_exchange_val_64_acq):
Cast newval to long.
* sysdeps/x86_64/bits/atomic.h
(__arch_compare_and_exchange_val_64_acq): Cast newval and oldval to
long.
(atomic_exchange): Cast newvalue to long if sizeof == 8.
(atomic_exchange_and_add): Cast value to long if sizeof == 8.
(atomic_add, atomic_add_negative, atomic_add_zero): Likewise.
(atomic_bit_set): Shift 1L up in all cases to shut up warnings.
Diffstat (limited to 'sysdeps')
-rw-r--r--  sysdeps/ia64/bits/atomic.h     | 33
-rw-r--r--  sysdeps/powerpc/bits/atomic.h  | 12
-rw-r--r--  sysdeps/s390/bits/atomic.h     |  2
-rw-r--r--  sysdeps/x86_64/bits/atomic.h   | 21
4 files changed, 36 insertions, 32 deletions
diff --git a/sysdeps/ia64/bits/atomic.h b/sysdeps/ia64/bits/atomic.h
index 68d79fa9ec..b6522b3272 100644
--- a/sysdeps/ia64/bits/atomic.h
+++ b/sysdeps/ia64/bits/atomic.h
@@ -75,23 +75,26 @@ typedef uintmax_t uatomic_max_t;
 /* Atomically store newval and return the old value.  */
 #define atomic_exchange(mem, value) \
-  __sync_lock_test_and_set_si (mem, value)
+  ({ __typeof (*mem) __result; \
+     if (sizeof (*mem) == 4) \
+       __result = __sync_lock_test_and_set_si ((int *) (mem), (int) (value)); \
+     else if (sizeof (*mem) == 8) \
+       __result = __sync_lock_test_and_set_di ((long *) (mem), \
+                                               (long) (value)); \
+     else \
+       abort (); \
+     __result; })
+
 #define atomic_exchange_and_add(mem, value) \
-  ({ __typeof (*mem) __oldval, __val; \
-     __typeof (mem) __memp = (mem); \
-     __typeof (*mem) __value = (value); \
-     \
-     __val = (*__memp); \
-     do \
-       { \
-         __oldval = __val; \
-         __val = atomic_compare_and_exchange_val_acq (__memp, \
-                                                      __oldval + __value, \
-                                                      __oldval); \
-       } \
-     while (__builtin_expect (__val != __oldval, 0)); \
-     __oldval; })
+  ({ __typeof (*mem) __result; \
+     if (sizeof (*mem) == 4) \
+       __result = __sync_fetch_and_add_si ((int *) (mem), (int) (value)); \
+     else if (sizeof (*mem) == 8) \
+       __result = __sync_fetch_and_add_di ((long *) (mem), (long) (value)); \
+     else \
+       abort (); \
+     __result; })
 
 #define atomic_decrement_if_positive(mem) \
   ({ __typeof (*mem) __oldval, __val; \
diff --git a/sysdeps/powerpc/bits/atomic.h b/sysdeps/powerpc/bits/atomic.h
index e0f2bd4881..bde0ded10b 100644
--- a/sysdeps/powerpc/bits/atomic.h
+++ b/sysdeps/powerpc/bits/atomic.h
@@ -143,8 +143,8 @@ typedef uintmax_t uatomic_max_t;
     __typeof (*mem) __val; \
     __asm __volatile (__ARCH_REL_INSTR "\n" \
                       "1: lwarx %0,0,%2\n" \
-                      " stwcx. %3,0,%2\n" \
-                      " bne- 1b" \
+                      " stwcx. %3,0,%2\n" \
+                      " bne- 1b" \
                       : "=&r" (__val), "=m" (*mem) \
                       : "r" (mem), "r" (value), "1" (*mem) \
                       : "cr0"); \
@@ -155,11 +155,11 @@ typedef uintmax_t uatomic_max_t;
   ({ \
     __typeof (*mem) __val, __tmp; \
     __asm __volatile ("1: lwarx %0,0,%3\n" \
-                      " addi %1,%0,%4\n" \
-                      " stwcx. %1,0,%3\n" \
-                      " bne- 1b" \
+                      " add %1,%0,%4\n" \
+                      " stwcx. %1,0,%3\n" \
+                      " bne- 1b" \
                       : "=&b" (__val), "=&r" (__tmp), "=m" (*mem) \
-                      : "r" (mem), "I" (value), "2" (*mem) \
+                      : "r" (mem), "r" (value), "2" (*mem) \
                       : "cr0"); \
     __val; \
   })
diff --git a/sysdeps/s390/bits/atomic.h b/sysdeps/s390/bits/atomic.h
index 8504458cc7..51ad6d25e4 100644
--- a/sysdeps/s390/bits/atomic.h
+++ b/sysdeps/s390/bits/atomic.h
@@ -65,7 +65,7 @@ typedef uintmax_t uatomic_max_t;
      __typeof (*mem) __archold = (oldval); \
      __asm __volatile ("csg %0,%2,%1" \
                        : "+d" (__archold), "=Q" (*__archmem) \
-                       : "d" (newval), "m" (*__archmem) : "cc" ); \
+                       : "d" ((long) (newval)), "m" (*__archmem) : "cc" ); \
      __archold; })
 #else
 /* For 31 bit we do not really need 64-bit compare-and-exchange.  We can
diff --git a/sysdeps/x86_64/bits/atomic.h b/sysdeps/x86_64/bits/atomic.h
index 1d41e7761c..0582103f2a 100644
--- a/sysdeps/x86_64/bits/atomic.h
+++ b/sysdeps/x86_64/bits/atomic.h
@@ -80,7 +80,8 @@ typedef uintmax_t uatomic_max_t;
   ({ __typeof (*mem) ret; \
      __asm __volatile (LOCK "cmpxchgq %q2, %1" \
                        : "=a" (ret), "=m" (*mem) \
-                       : "r" (newval), "m" (*mem), "0" (oldval)); \
+                       : "r" ((long) (newval)), "m" (*mem), \
+                         "0" ((long) (oldval))); \
      ret; })
@@ -102,7 +103,7 @@ typedef uintmax_t uatomic_max_t;
      else \
        __asm __volatile ("xchgq %q0, %1" \
                          : "=r" (result), "=m" (*mem) \
-                         : "0" (newvalue), "m" (*mem)); \
+                         : "0" ((long) (newvalue)), "m" (*mem)); \
      result; })
@@ -123,7 +124,7 @@ typedef uintmax_t uatomic_max_t;
      else \
        __asm __volatile (LOCK "xaddq %q0, %1" \
                          : "=r" (result), "=m" (*mem) \
-                         : "0" (value), "m" (*mem)); \
+                         : "0" ((long) (value)), "m" (*mem)); \
      result; })
@@ -147,7 +148,7 @@ typedef uintmax_t uatomic_max_t;
      else \
        __asm __volatile (LOCK "addq %q1, %0" \
                          : "=m" (*mem) \
-                         : "ir" (value), "m" (*mem)); \
+                         : "ir" ((long) (value)), "m" (*mem)); \
   })
@@ -168,7 +169,7 @@ typedef uintmax_t uatomic_max_t;
      else \
        __asm __volatile (LOCK "addq %q2, %0; sets %1" \
                          : "=m" (*mem), "=qm" (__result) \
-                         : "ir" (value), "m" (*mem)); \
+                         : "ir" ((long) (value)), "m" (*mem)); \
      __result; })
@@ -189,7 +190,7 @@ typedef uintmax_t uatomic_max_t;
      else \
        __asm __volatile (LOCK "addq %q2, %0; setz %1" \
                          : "=m" (*mem), "=qm" (__result) \
-                         : "ir" (value), "m" (*mem)); \
+                         : "ir" ((long) (value)), "m" (*mem)); \
      __result; })
@@ -279,19 +280,19 @@ typedef uintmax_t uatomic_max_t;
   (void) ({ if (sizeof (*mem) == 1) \
               __asm __volatile (LOCK "orb %b2, %0" \
                                 : "=m" (*mem) \
-                                : "m" (*mem), "ir" (1 << (bit))); \
+                                : "m" (*mem), "ir" (1L << (bit))); \
             else if (sizeof (*mem) == 2) \
               __asm __volatile (LOCK "orw %w2, %0" \
                                 : "=m" (*mem) \
-                                : "m" (*mem), "ir" (1 << (bit))); \
+                                : "m" (*mem), "ir" (1L << (bit))); \
             else if (sizeof (*mem) == 4) \
               __asm __volatile (LOCK "orl %2, %0" \
                                 : "=m" (*mem) \
-                                : "m" (*mem), "ir" (1 << (bit))); \
+                                : "m" (*mem), "ir" (1L << (bit))); \
             else if (__builtin_constant_p (bit) && (bit) < 32) \
               __asm __volatile (LOCK "orq %2, %0" \
                                 : "=m" (*mem) \
-                                : "m" (*mem), "i" (1 << (bit))); \
+                                : "m" (*mem), "i" (1L << (bit))); \
             else \
               __asm __volatile (LOCK "orq %q2, %0" \
                                 : "=m" (*mem) \