summaryrefslogtreecommitdiff
path: root/storage/perfschema/pfs_atomic.h
diff options
context:
space:
mode:
Diffstat (limited to 'storage/perfschema/pfs_atomic.h')
-rw-r--r--  storage/perfschema/pfs_atomic.h  |  79
1 files changed, 79 insertions, 0 deletions
diff --git a/storage/perfschema/pfs_atomic.h b/storage/perfschema/pfs_atomic.h
index ffb4c24ecbf..61b8c2b2804 100644
--- a/storage/perfschema/pfs_atomic.h
+++ b/storage/perfschema/pfs_atomic.h
@@ -43,6 +43,16 @@ public:
}
/** Atomic load. */
+ static inline int64 load_64(volatile int64 *ptr)
+ {
+ int64 result;
+ rdlock(ptr);
+ result= my_atomic_load64(ptr);
+ rdunlock(ptr);
+ return result;
+ }
+
+ /** Atomic load. */
static inline uint32 load_u32(volatile uint32 *ptr)
{
uint32 result;
@@ -52,6 +62,16 @@ public:
return result;
}
+ /** Atomic load. */
+ static inline uint64 load_u64(volatile uint64 *ptr)
+ {
+ uint64 result;
+ rdlock(ptr);
+ result= (uint64) my_atomic_load64((int64*) ptr);
+ rdunlock(ptr);
+ return result;
+ }
+
/** Atomic store. */
static inline void store_32(volatile int32 *ptr, int32 value)
{
@@ -61,6 +81,14 @@ public:
}
/** Atomic store. */
+ static inline void store_64(volatile int64 *ptr, int64 value)
+ {
+ wrlock(ptr);
+ my_atomic_store64(ptr, value);
+ wrunlock(ptr);
+ }
+
+ /** Atomic store. */
static inline void store_u32(volatile uint32 *ptr, uint32 value)
{
wrlock(ptr);
@@ -68,6 +96,14 @@ public:
wrunlock(ptr);
}
+ /** Atomic store. */
+ static inline void store_u64(volatile uint64 *ptr, uint64 value)
+ {
+ wrlock(ptr);
+ my_atomic_store64((int64*) ptr, (int64) value);
+ wrunlock(ptr);
+ }
+
/** Atomic add. */
static inline int32 add_32(volatile int32 *ptr, int32 value)
{
@@ -79,6 +115,16 @@ public:
}
/** Atomic add. */
+ static inline int64 add_64(volatile int64 *ptr, int64 value)
+ {
+ int64 result;
+ wrlock(ptr);
+ result= my_atomic_add64(ptr, value);
+ wrunlock(ptr);
+ return result;
+ }
+
+ /** Atomic add. */
static inline uint32 add_u32(volatile uint32 *ptr, uint32 value)
{
uint32 result;
@@ -88,6 +134,16 @@ public:
return result;
}
+ /** Atomic add. */
+ static inline uint64 add_u64(volatile uint64 *ptr, uint64 value)
+ {
+ uint64 result;
+ wrlock(ptr);
+ result= (uint64) my_atomic_add64((int64*) ptr, (int64) value);
+ wrunlock(ptr);
+ return result;
+ }
+
/** Atomic compare and swap. */
static inline bool cas_32(volatile int32 *ptr, int32 *old_value,
int32 new_value)
@@ -100,6 +156,17 @@ public:
}
/** Atomic compare and swap. */
+ static inline bool cas_64(volatile int64 *ptr, int64 *old_value,
+ int64 new_value)
+ {
+ bool result;
+ wrlock(ptr);
+ result= my_atomic_cas64(ptr, old_value, new_value);
+ wrunlock(ptr);
+ return result;
+ }
+
+ /** Atomic compare and swap. */
static inline bool cas_u32(volatile uint32 *ptr, uint32 *old_value,
uint32 new_value)
{
@@ -111,6 +178,18 @@ public:
return result;
}
+ /** Atomic compare and swap. */
+ static inline bool cas_u64(volatile uint64 *ptr, uint64 *old_value,
+ uint64 new_value)
+ {
+ bool result;
+ wrlock(ptr);
+ result= my_atomic_cas64((int64*) ptr, (int64*) old_value,
+ (int64) new_value);
+ wrunlock(ptr);
+ return result;
+ }
+
private:
static my_atomic_rwlock_t m_rwlock_array[256];