author    ylavic <ylavic@13f79535-47bb-0310-9956-ffa450edef68>  2022-06-26 11:47:11 +0000
committer ylavic <ylavic@13f79535-47bb-0310-9956-ffa450edef68>  2022-06-26 11:47:11 +0000
commit    35591f029bf8cf34d45fca07f917dfe5ac232306 (patch)
tree      051a907c24123f743c9600f2c869a13f959cf78f
parent    eab43f3947a42863d9d226346f8718975ae5ecd8 (diff)
download  libapr-35591f029bf8cf34d45fca07f917dfe5ac232306.tar.gz
atomic: Avoid casts and/or use correct ones.
* atomic/os390/atomic.c (apr_atomic_xchgptr):
  Dereferencing without casting is fine/better.
* atomic/unix/mutex.c (apr_atomic_casptr, apr_atomic_xchgptr):
  Dereferencing without casting is fine/better.
* atomic/win32/apr_atomic.c (apr_atomic_add32, apr_atomic_sub32, apr_atomic_inc32,
  apr_atomic_dec32, apr_atomic_set32, apr_atomic_cas32, apr_atomic_xchg32):
  Native Interlocked 32bit functions expect "long volatile *", don't cast out
  volatility.
* atomic/win32/apr_atomic.c (apr_atomic_casptr):
  32bit InterlockedCompareExchangePointer() expects "long volatile *", don't
  cast to (void **).
* atomic/win32/apr_atomic.c (apr_atomic_xchgptr):
  InterlockedExchangePointer() for both 32bit and 64bit expects "void *volatile *",
  no need to cast.

git-svn-id: https://svn.apache.org/repos/asf/apr/apr/trunk@1902257 13f79535-47bb-0310-9956-ffa450edef68
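For illustration only (not part of this commit), a minimal C sketch of the cast rule
described above, assuming a Win32 build with <windows.h>; the my_* names and the
my_uint32_t typedef are hypothetical stand-ins for the APR equivalents:

#include <windows.h>

typedef unsigned int my_uint32_t;   /* hypothetical stand-in for apr_uint32_t */

static my_uint32_t my_atomic_add32(volatile my_uint32_t *mem, my_uint32_t val)
{
    /* InterlockedExchangeAdd() is declared with "long volatile *":
     * cast to that exact type rather than to plain "long *", which
     * would silently discard the volatile qualifier. */
    return InterlockedExchangeAdd((long volatile *)mem, val);
}

static void *my_atomic_xchgptr(void *volatile *mem, void *with)
{
    /* InterlockedExchangePointer() already takes "void *volatile *",
     * so no cast is needed at all. */
    return InterlockedExchangePointer(mem, with);
}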
-rw-r--r--  atomic/os390/atomic.c      |  2
-rw-r--r--  atomic/unix/mutex.c        |  4
-rw-r--r--  atomic/win32/apr_atomic.c  | 18
3 files changed, 12 insertions, 12 deletions
diff --git a/atomic/os390/atomic.c b/atomic/os390/atomic.c
index 249ee15c2..9442f27ea 100644
--- a/atomic/os390/atomic.c
+++ b/atomic/os390/atomic.c
@@ -124,7 +124,7 @@ APR_DECLARE(void*) apr_atomic_xchgptr(void *volatile *mem_ptr, void *new_ptr)
{
void *old_ptr;
- old_ptr = *(void **)mem_ptr; /* old is automatically updated on cs failure */
+ old_ptr = *mem_ptr; /* old is automatically updated on cs failure */
#if APR_SIZEOF_VOIDP == 4
do {
} while (__cs1(&old_ptr, mem_ptr, &new_ptr));
diff --git a/atomic/unix/mutex.c b/atomic/unix/mutex.c
index 78ad75336..61919ecd3 100644
--- a/atomic/unix/mutex.c
+++ b/atomic/unix/mutex.c
@@ -180,7 +180,7 @@ APR_DECLARE(void*) apr_atomic_casptr(void *volatile *mem, void *with, const void
void *prev;
DECLARE_MUTEX_LOCKED(mutex, *mem);
- prev = *(void **)mem;
+ prev = *mem;
if (prev == cmp) {
*mem = with;
}
@@ -195,7 +195,7 @@ APR_DECLARE(void*) apr_atomic_xchgptr(void *volatile *mem, void *with)
void *prev;
DECLARE_MUTEX_LOCKED(mutex, *mem);
- prev = *(void **)mem;
+ prev = *mem;
*mem = with;
MUTEX_UNLOCK(mutex);
diff --git a/atomic/win32/apr_atomic.c b/atomic/win32/apr_atomic.c
index 75abf92e9..ba48589b7 100644
--- a/atomic/win32/apr_atomic.c
+++ b/atomic/win32/apr_atomic.c
@@ -30,7 +30,7 @@ APR_DECLARE(apr_uint32_t) apr_atomic_add32(volatile apr_uint32_t *mem, apr_uint3
#if (defined(_M_IA64) || defined(_M_AMD64))
return InterlockedExchangeAdd(mem, val);
#else
- return InterlockedExchangeAdd((long *)mem, val);
+ return InterlockedExchangeAdd((long volatile *)mem, val);
#endif
}
@@ -44,7 +44,7 @@ APR_DECLARE(void) apr_atomic_sub32(volatile apr_uint32_t *mem, apr_uint32_t val)
#if (defined(_M_IA64) || defined(_M_AMD64))
InterlockedExchangeAdd(mem, -val);
#else
- InterlockedExchangeAdd((long *)mem, -val);
+ InterlockedExchangeAdd((long volatile *)mem, -val);
#endif
}
@@ -54,7 +54,7 @@ APR_DECLARE(apr_uint32_t) apr_atomic_inc32(volatile apr_uint32_t *mem)
#if (defined(_M_IA64) || defined(_M_AMD64)) && !defined(RC_INVOKED)
return InterlockedIncrement(mem) - 1;
#else
- return InterlockedIncrement((long *)mem) - 1;
+ return InterlockedIncrement((long volatile *)mem) - 1;
#endif
}
@@ -63,7 +63,7 @@ APR_DECLARE(int) apr_atomic_dec32(volatile apr_uint32_t *mem)
#if (defined(_M_IA64) || defined(_M_AMD64)) && !defined(RC_INVOKED)
return InterlockedDecrement(mem);
#else
- return InterlockedDecrement((long *)mem);
+ return InterlockedDecrement((long volatile *)mem);
#endif
}
@@ -72,7 +72,7 @@ APR_DECLARE(void) apr_atomic_set32(volatile apr_uint32_t *mem, apr_uint32_t val)
#if (defined(_M_IA64) || defined(_M_AMD64)) && !defined(RC_INVOKED)
InterlockedExchange(mem, val);
#else
- InterlockedExchange((long*)mem, val);
+ InterlockedExchange((long volatile *)mem, val);
#endif
}
@@ -87,7 +87,7 @@ APR_DECLARE(apr_uint32_t) apr_atomic_cas32(volatile apr_uint32_t *mem, apr_uint3
#if (defined(_M_IA64) || defined(_M_AMD64)) && !defined(RC_INVOKED)
return InterlockedCompareExchange(mem, with, cmp);
#else
- return InterlockedCompareExchange((long*)mem, with, cmp);
+ return InterlockedCompareExchange((long volatile *)mem, with, cmp);
#endif
}
@@ -96,7 +96,7 @@ APR_DECLARE(void *) apr_atomic_casptr(void *volatile *mem, void *with, const voi
#if (defined(_M_IA64) || defined(_M_AMD64)) && !defined(RC_INVOKED)
return InterlockedCompareExchangePointer(mem, with, (void*)cmp);
#else
- return InterlockedCompareExchangePointer((void**)mem, with, (void*)cmp);
+ return InterlockedCompareExchangePointer((long volatile *)mem, with, (void*)cmp);
#endif
}
@@ -105,11 +105,11 @@ APR_DECLARE(apr_uint32_t) apr_atomic_xchg32(volatile apr_uint32_t *mem, apr_uint
#if (defined(_M_IA64) || defined(_M_AMD64)) && !defined(RC_INVOKED)
return InterlockedExchange(mem, val);
#else
- return InterlockedExchange((long *)mem, val);
+ return InterlockedExchange((long volatile *)mem, val);
#endif
}
APR_DECLARE(void*) apr_atomic_xchgptr(void *volatile *mem, void *with)
{
- return InterlockedExchangePointer((void**)mem, with);
+ return InterlockedExchangePointer(mem, with);
}