Diffstat (limited to 'ACE/ace/Atomic_Op.cpp')
-rw-r--r--  ACE/ace/Atomic_Op.cpp  36
1 file changed, 0 insertions(+), 36 deletions(-)
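The hunks below drop the SPARC/Solaris fallback from each atomic helper: the #include of ace/Atomic_Op_Sparc.h and every branch that called ace_atomic_add_long, ace_atomic_swap_long, or ace_atomic_swap_add_long when ACE_HAS_SOLARIS_ATOMIC_LIB was not defined. As a rough point of reference only (this code is not part of the commit, and the sketch_* names are made up for illustration), the read-modify-write semantics those branches provided can be expressed with the GCC/Clang __atomic builtins, assuming a GCC-compatible compiler:

// Hypothetical stand-ins for the removed Solaris branches, using the
// GCC/Clang __atomic builtins. The sketch_* names are illustrative only;
// they are not part of ACE or of this patch.
static long sketch_increment (volatile long *value)
{
  // Same result as the "xadd %0, (%1)" path: the incremented value.
  return __atomic_add_fetch (value, 1, __ATOMIC_SEQ_CST);
}

static long sketch_exchange (volatile long *value, long rhs)
{
  // Same result as the "xchg %0, (%1)" path: the previous value.
  return __atomic_exchange_n (value, rhs, __ATOMIC_SEQ_CST);
}

static long sketch_exchange_add (volatile long *value, long rhs)
{
  // Same result as the LOCK'd "xadd" path: the value before the add.
  return __atomic_fetch_add (value, rhs, __ATOMIC_SEQ_CST);
}

With these builtins the single_cpu_*/multi_cpu_* split seen in the hunks (plain vs. LOCK-prefixed xadd/xchg) is also unnecessary, since the compiler always emits a fully atomic sequence for the target.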
diff --git a/ACE/ace/Atomic_Op.cpp b/ACE/ace/Atomic_Op.cpp
index 4b16b311743..52394cd01a3 100644
--- a/ACE/ace/Atomic_Op.cpp
+++ b/ACE/ace/Atomic_Op.cpp
@@ -7,10 +7,6 @@
#if defined (ACE_HAS_BUILTIN_ATOMIC_OP)
-#if defined (ACE_INCLUDE_ATOMIC_OP_SPARC)
-# include "ace/Atomic_Op_Sparc.h"
-#endif /* ACE_INCLUDE_ATOMIC_OP_SPARC */
-
namespace {
#if defined (_MSC_VER)
// Disable "no return value" warning, as we will be putting
@@ -27,10 +23,6 @@ single_cpu_increment (volatile long *value)
unsigned long addr = reinterpret_cast<unsigned long> (value);
asm( "xadd %0, (%1)" : "+r"(tmp) : "r"(addr) );
return tmp + 1;
-#elif !defined (ACE_HAS_SOLARIS_ATOMIC_LIB) && (defined (sun) || \
- (defined (__SUNPRO_CC) && (defined (__i386) || defined (__x86_64))))
- return ace_atomic_add_long (
- reinterpret_cast<volatile unsigned long*> (value), 1);
#elif defined(__GNUC__) && defined(__PPC__)
long tmp;
asm("lwz %0,%1" : "=r" (tmp) : "m" (*value) );
@@ -51,10 +43,6 @@ single_cpu_decrement (volatile long *value)
unsigned long addr = reinterpret_cast<unsigned long> (value);
asm( "xadd %0, (%1)" : "+r"(tmp) : "r"(addr) );
return tmp - 1;
-#elif !defined (ACE_HAS_SOLARIS_ATOMIC_LIB) && (defined (sun) || \
- (defined (__SUNPRO_CC) && (defined (__i386) || defined (__x86_64))))
- return ace_atomic_add_long (
- reinterpret_cast<volatile unsigned long*> (value), -1);
#elif defined(__GNUC__) && defined(__PPC__)
long tmp;
asm("lwz %0,%1" : "=r" (tmp) : "m" (*value) );
@@ -74,10 +62,6 @@ single_cpu_exchange (volatile long *value, long rhs)
unsigned long addr = reinterpret_cast<unsigned long> (value);
asm( "xchg %0, (%1)" : "+r"(rhs) : "r"(addr) );
return rhs;
-#elif !defined (ACE_HAS_SOLARIS_ATOMIC_LIB) && (defined (sun) || \
- (defined (__SUNPRO_CC) && (defined (__i386) || defined (__x86_64))))
- return ace_atomic_swap_long (
- reinterpret_cast<volatile unsigned long*> (value), rhs);
#elif defined(__GNUC__) && defined(__PPC__)
long tmp;
asm("lwz %0,%1" : "=r" (tmp) : "m" (rhs) );
@@ -97,10 +81,6 @@ single_cpu_exchange_add (volatile long *value, long rhs)
unsigned long addr = reinterpret_cast<unsigned long> (value);
asm( "xadd %0, (%1)" : "+r"(rhs) : "r"(addr) );
return rhs;
-#elif !defined (ACE_HAS_SOLARIS_ATOMIC_LIB) && (defined (sun) || \
- (defined (__SUNPRO_CC) && (defined (__i386) || defined (__x86_64))))
- return ace_atomic_swap_add_long (
- reinterpret_cast<volatile unsigned long*> (value), rhs);
#elif defined(__GNUC__) && defined(__PPC__)
long tmp;
asm("add %0,%1,%2" : "=r" (tmp) : "r" (*value), "r" (rhs) );
@@ -140,10 +120,6 @@ multi_cpu_increment (volatile long *value)
unsigned long addr = reinterpret_cast<unsigned long> (value);
asm( "lock ; xadd %0, (%1)" : "+r"(tmp) : "r"(addr) );
return tmp + 1;
-#elif !defined (ACE_HAS_SOLARIS_ATOMIC_LIB) && (defined (sun) || \
- (defined (__SUNPRO_CC) && (defined (__i386) || defined (__x86_64))))
- return ace_atomic_add_long (
- reinterpret_cast<volatile unsigned long*> (value), 1);
#else /* ACE_HAS_INTEL_ASSEMBLY*/
ACE_UNUSED_ARG (value);
ACE_NOTSUP_RETURN (-1);
@@ -158,10 +134,6 @@ multi_cpu_decrement (volatile long *value)
unsigned long addr = reinterpret_cast<unsigned long> (value);
asm( "lock ; xadd %0, (%1)" : "+r"(tmp) : "r"(addr) );
return tmp - 1;
-#elif !defined (ACE_HAS_SOLARIS_ATOMIC_LIB) && (defined (sun) || \
- (defined (__SUNPRO_CC) && (defined (__i386) || defined (__x86_64))))
- return ace_atomic_add_long (
- reinterpret_cast<volatile unsigned long*> (value), -1);
#else /* ACE_HAS_INTEL_ASSEMBLY*/
ACE_UNUSED_ARG (value);
ACE_NOTSUP_RETURN (-1);
@@ -176,10 +148,6 @@ multi_cpu_exchange (volatile long *value, long rhs)
// The XCHG instruction automatically follows LOCK semantics
asm( "xchg %0, (%1)" : "+r"(rhs) : "r"(addr) );
return rhs;
-#elif !defined (ACE_HAS_SOLARIS_ATOMIC_LIB) && (defined (sun) || \
- (defined (__SUNPRO_CC) && (defined (__i386) || defined (__x86_64))))
- return ace_atomic_swap_long (
- reinterpret_cast<volatile unsigned long*> (value), rhs);
#else /* ACE_HAS_INTEL_ASSEMBLY*/
ACE_UNUSED_ARG (value);
ACE_UNUSED_ARG (rhs);
@@ -194,10 +162,6 @@ multi_cpu_exchange_add (volatile long *value, long rhs)
unsigned long addr = reinterpret_cast<unsigned long> (value);
asm( "lock ; xadd %0, (%1)" : "+r"(rhs) : "r"(addr) );
return rhs;
-#elif !defined (ACE_HAS_SOLARIS_ATOMIC_LIB) && (defined (sun) || \
- (defined (__SUNPRO_CC) && (defined (__i386) || defined (__x86_64))))
- return ace_atomic_swap_add_long (
- reinterpret_cast<volatile unsigned long*> (value), rhs);
#elif defined (WIN32) && !defined (ACE_HAS_INTERLOCKED_EXCHANGEADD)
# if defined (_MSC_VER)
__asm