summary refs log tree commit diff
path: root/sysdeps/sparc/sparc64
diff options
context:
space:
mode:
Diffstat (limited to 'sysdeps/sparc/sparc64')
-rw-r--r--  sysdeps/sparc/sparc64/atomicity.h   |  82
-rw-r--r--  sysdeps/sparc/sparc64/bits/atomic.h | 103
2 files changed, 103 insertions, 82 deletions
diff --git a/sysdeps/sparc/sparc64/atomicity.h b/sysdeps/sparc/sparc64/atomicity.h
index 4d2ca52487..e69de29bb2 100644
--- a/sysdeps/sparc/sparc64/atomicity.h
+++ b/sysdeps/sparc/sparc64/atomicity.h
@@ -1,82 +0,0 @@
-/* Low-level functions for atomic operations. Sparc64 version.
- Copyright (C) 1999 Free Software Foundation, Inc.
- This file is part of the GNU C Library.
-
- The GNU C Library is free software; you can redistribute it and/or
- modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
- version 2.1 of the License, or (at your option) any later version.
-
- The GNU C Library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should have received a copy of the GNU Lesser General Public
- License along with the GNU C Library; if not, write to the Free
- Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
- 02111-1307 USA. */
-
-#ifndef _ATOMICITY_H
-#define _ATOMICITY_H 1
-
-#include <inttypes.h>
-
-static inline int
-__attribute__ ((unused))
-exchange_and_add (volatile uint32_t *mem, int val)
-{
- uint32_t tmp1, tmp2;
-
- __asm__ __volatile__("1: lduw [%2], %0\n\t"
- " add %0, %3, %1\n\t"
- " cas [%2], %0, %1\n\t"
- " sub %0, %1, %0\n\t"
- " brnz,pn %0, 1b\n\t"
- " nop"
- : "=&r" (tmp1), "=&r" (tmp2)
- : "r" (mem), "r" (val)
- : "memory");
- return tmp2;
-}
-
-static inline void
-__attribute__ ((unused))
-atomic_add (volatile uint32_t *mem, int val)
-{
- uint32_t tmp1, tmp2;
-
- __asm__ __volatile__("1: lduw [%2], %0\n\t"
- " add %0, %3, %1\n\t"
- " cas [%2], %0, %1\n\t"
- " sub %0, %1, %0\n\t"
- " brnz,pn %0, 1b\n\t"
- " nop"
- : "=&r" (tmp1), "=&r" (tmp2)
- : "r" (mem), "r" (val)
- : "memory");
-}
-
-static inline int
-__attribute__ ((unused))
-compare_and_swap (volatile long int *p, long int oldval, long int newval)
-{
- register long int tmp, tmp2;
-
- __asm__ __volatile__("1: ldx [%4], %0\n\t"
- " mov %2, %1\n\t"
- " cmp %0, %3\n\t"
- " bne,a,pn %%xcc, 2f\n\t"
- " mov 0, %0\n\t"
- " casx [%4], %0, %1\n\t"
- " sub %0, %1, %0\n\t"
- " brnz,pn %0, 1b\n\t"
- " mov 1, %0\n\t"
- "2:"
- : "=&r" (tmp), "=&r" (tmp2)
- : "r" (newval), "r" (oldval), "r" (p)
- : "memory");
- return tmp;
-}
-
-#endif /* atomicity.h */
diff --git a/sysdeps/sparc/sparc64/bits/atomic.h b/sysdeps/sparc/sparc64/bits/atomic.h
new file mode 100644
index 0000000000..bb3c91d8d0
--- /dev/null
+++ b/sysdeps/sparc/sparc64/bits/atomic.h
@@ -0,0 +1,103 @@
+/* Atomic operations. sparc64 version.
+ Copyright (C) 2003 Free Software Foundation, Inc.
+ This file is part of the GNU C Library.
+ Contributed by Jakub Jelinek <jakub@redhat.com>, 2003.
+
+ The GNU C Library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ The GNU C Library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with the GNU C Library; if not, write to the Free
+ Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA
+ 02111-1307 USA. */
+
+#include <stdint.h>
+
+typedef int8_t atomic8_t;
+typedef uint8_t uatomic8_t;
+typedef int_fast8_t atomic_fast8_t;
+typedef uint_fast8_t uatomic_fast8_t;
+
+typedef int16_t atomic16_t;
+typedef uint16_t uatomic16_t;
+typedef int_fast16_t atomic_fast16_t;
+typedef uint_fast16_t uatomic_fast16_t;
+
+typedef int32_t atomic32_t;
+typedef uint32_t uatomic32_t;
+typedef int_fast32_t atomic_fast32_t;
+typedef uint_fast32_t uatomic_fast32_t;
+
+typedef int64_t atomic64_t;
+typedef uint64_t uatomic64_t;
+typedef int_fast64_t atomic_fast64_t;
+typedef uint_fast64_t uatomic_fast64_t;
+
+typedef intptr_t atomicptr_t;
+typedef uintptr_t uatomicptr_t;
+typedef intmax_t atomic_max_t;
+typedef uintmax_t uatomic_max_t;
+
+
+#define __arch_compare_and_exchange_val_8_acq(mem, newval, oldval) \
+ (abort (), 0)
+
+#define __arch_compare_and_exchange_val_16_acq(mem, newval, oldval) \
+ (abort (), 0)
+
+#define __arch_compare_and_exchange_val_32_acq(mem, newval, oldval) \
+({ \
+ __typeof (*(mem)) __acev_tmp; \
+ __typeof (mem) __acev_mem = (mem); \
+ __asm __volatile ("cas [%4], %2, %0" \
+ : "=r" (__acev_tmp), "=m" (*__acev_mem) \
+ : "r" (oldval), "m" (*__acev_mem), "r" (__acev_mem), \
+ "0" (newval)); \
+ __acev_tmp; })
+
+#define __arch_compare_and_exchange_val_64_acq(mem, newval, oldval) \
+({ \
+ __typeof (*(mem)) __acev_tmp; \
+ __typeof (mem) __acev_mem = (mem); \
+ __asm __volatile ("casx [%4], %2, %0" \
+ : "=r" (__acev_tmp), "=m" (*__acev_mem) \
+ : "r" ((long) (oldval)), "m" (*__acev_mem), \
+ "r" (__acev_mem), "0" ((long) (newval))); \
+ __acev_tmp; })
+
+#define atomic_exchange(mem, newvalue) \
+ ({ __typeof (*(mem)) __oldval, __val; \
+ __typeof (mem) __memp = (mem); \
+ __typeof (*(mem)) __value = (newvalue); \
+ \
+ if (sizeof (*(mem)) == 4) \
+ __asm ("swap %0, %1" \
+ : "=m" (*__memp), "=r" (__oldval) \
+ : "m" (*__memp), "1" (__value)); \
+ else \
+ { \
+ __val = *__memp; \
+ do \
+ { \
+ __oldval = __val; \
+ __val = atomic_compare_and_exchange_val_acq (__memp, __value, \
+ __oldval); \
+ } \
+ while (__builtin_expect (__val != __oldval, 0)); \
+ } \
+ __oldval; })
+
+#define atomic_full_barrier() \
+ __asm __volatile ("membar #LoadLoad | #LoadStore" \
+ " | #StoreLoad | #StoreStore" : : : "memory")
+#define atomic_read_barrier() \
+ __asm __volatile ("membar #LoadLoad | #LoadStore" : : : "memory")
+#define atomic_write_barrier() \
+ __asm __volatile ("membar #StoreLoad | #StoreStore" : : : "memory")