author     Marko Mäkelä <marko.makela@mariadb.com>  2020-05-18 17:30:02 +0300
committer  Marko Mäkelä <marko.makela@mariadb.com>  2020-05-18 17:30:02 +0300
commit     23047d3ed42eacb5eaa5475a8fa4161550d01c52 (patch)
tree       e723067fea7cfb663ef7e4777e29ffd7dcd2905d /include
parent     f86d97c3fee0127d83c009e448f419ac857c72c7 (diff)
parent     faf6d0ef3f9323c6d24f8cdb6533ed23d1a6bb40 (diff)
download   mariadb-git-23047d3ed42eacb5eaa5475a8fa4161550d01c52.tar.gz
Merge 10.4 into 10.5
Diffstat (limited to 'include')
-rw-r--r--  include/my_atomic.h    45
-rw-r--r--  include/my_valgrind.h  21
-rw-r--r--  include/span.h         62
3 files changed, 99 insertions, 29 deletions
diff --git a/include/my_atomic.h b/include/my_atomic.h
index 1c54c24d455..88f6746ba3d 100644
--- a/include/my_atomic.h
+++ b/include/my_atomic.h
@@ -2,6 +2,7 @@
 #define MY_ATOMIC_INCLUDED
 
 /* Copyright (c) 2006, 2010, Oracle and/or its affiliates. All rights reserved.
+   Copyright (c) 2018, 2020, MariaDB
 
    This program is free software; you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
@@ -169,4 +170,48 @@
   my_atomic_casptr((P), (E), (D))
 #endif
 
+#ifdef __cplusplus
+#include <atomic>
+/**
+  A wrapper for std::atomic, defaulting to std::memory_order_relaxed.
+
+  When it comes to atomic loads or stores at std::memory_order_relaxed
+  on IA-32 or AMD64, this wrapper is only introducing some constraints
+  to the C++ compiler, to prevent some optimizations of loads or
+  stores.
+
+  On POWER and ARM, atomic loads and stores involve different instructions
+  from normal loads and stores and will thus incur some overhead.
+
+  Because atomic read-modify-write operations will always incur
+  overhead, we intentionally do not define
+  operator++(), operator--(), operator+=(), operator-=(), or similar,
+  to make the overhead stand out in the users of this code.
+*/
+template <typename Type> class Atomic_relaxed
+{
+  std::atomic<Type> m;
+public:
+  Atomic_relaxed(const Atomic_relaxed<Type> &rhs)
+  { m.store(rhs, std::memory_order_relaxed); }
+  Atomic_relaxed(Type val) : m(val) {}
+  Atomic_relaxed() {}
+
+  operator Type() const { return m.load(std::memory_order_relaxed); }
+  Type operator=(const Type val)
+  { m.store(val, std::memory_order_relaxed); return val; }
+  Type operator=(const Atomic_relaxed<Type> &rhs) { return *this= Type{rhs}; }
+  Type fetch_add(const Type i, std::memory_order o= std::memory_order_relaxed)
+  { return m.fetch_add(i, o); }
+  Type fetch_sub(const Type i, std::memory_order o= std::memory_order_relaxed)
+  { return m.fetch_sub(i, o); }
+  bool compare_exchange_strong(Type& i1, const Type i2,
+                               std::memory_order o1= std::memory_order_relaxed,
+                               std::memory_order o2= std::memory_order_relaxed)
+  { return m.compare_exchange_strong(i1, i2, o1, o2); }
+  Type exchange(const Type i, std::memory_order o= std::memory_order_relaxed)
+  { return m.exchange(i, o); }
+};
+#endif /* __cplusplus */
+
 #endif /* MY_ATOMIC_INCLUDED */
diff --git a/include/my_valgrind.h b/include/my_valgrind.h
index 2c623a044f4..8aaa261fd5b 100644
--- a/include/my_valgrind.h
+++ b/include/my_valgrind.h
@@ -32,6 +32,7 @@
 
 #if defined(HAVE_VALGRIND_MEMCHECK_H) && defined(HAVE_valgrind)
 # include <valgrind/memcheck.h>
+# define HAVE_valgrind_or_MSAN
 # define MEM_UNDEFINED(a,len) VALGRIND_MAKE_MEM_UNDEFINED(a,len)
 # define MEM_MAKE_DEFINED(a,len) VALGRIND_MAKE_MEM_DEFINED(a,len)
 # define MEM_NOACCESS(a,len) VALGRIND_MAKE_MEM_NOACCESS(a,len)
@@ -50,6 +51,7 @@ https://github.com/google/sanitizers/wiki/AddressSanitizerManualPoisoning */
 # define REDZONE_SIZE 8
 #elif __has_feature(memory_sanitizer)
 # include <sanitizer/msan_interface.h>
+# define HAVE_valgrind_or_MSAN
 # define MEM_UNDEFINED(a,len) __msan_allocated_memory(a,len)
 # define MEM_MAKE_DEFINED(a,len) __msan_unpoison(a,len)
 # define MEM_NOACCESS(a,len) ((void) 0)
@@ -65,16 +67,19 @@ https://github.com/google/sanitizers/wiki/AddressSanitizerManualPoisoning */
 # define REDZONE_SIZE 0
 #endif /* HAVE_VALGRIND_MEMCHECK_H */
 
-#if defined(TRASH_FREED_MEMORY)
-/* NOTE: Do not invoke TRASH_FILL directly! Use TRASH_ALLOC or TRASH_FREE.
-
-The MEM_UNDEFINED() call before memset() is for canceling the effect
-of any previous MEM_NOACCESS(). We must invoke MEM_UNDEFINED() after
-writing the dummy pattern, unless MEM_NOACCESS() is going to be invoked.
-On AddressSanitizer, the MEM_UNDEFINED() in TRASH_ALLOC() has no effect. */
+#ifdef TRASH_FREED_MEMORY
+/*
+  TRASH_FILL() has to call MEM_UNDEFINED() to cancel any effect of TRASH_FREE().
+  This can happen in the case one does
+  TRASH_ALLOC(A,B) ; TRASH_FREE(A,B) ; TRASH_ALLOC(A,B)
+  to reuse the same memory in an internal memory allocator like MEM_ROOT.
+  For my_malloc() and safemalloc() the extra MEM_UNDEFINED is bit of an
+  overkill.
+  TRASH_FILL() is an internal function and should not be used externally.
+*/
 #define TRASH_FILL(A,B,C) do { const size_t trash_tmp= (B); MEM_UNDEFINED(A, trash_tmp); memset(A, C, trash_tmp); } while (0)
 #else
-#define TRASH_FILL(A,B,C) while (0)
+#define TRASH_FILL(A,B,C) do { MEM_UNDEFINED((A), (B)); } while (0)
 #endif
 /** Note that some memory became allocated or uninitialized. */
 #define TRASH_ALLOC(A,B) do { TRASH_FILL(A,B,0xA5); MEM_UNDEFINED(A,B); } while(0)
diff --git a/include/span.h b/include/span.h
index 0ed0158088c..0e8516933c6 100644
--- a/include/span.h
+++ b/include/span.h
@@ -24,11 +24,33 @@ this program; if not, write to the Free Software Foundation, Inc.,
 namespace st_
 {
 
+namespace detail
+{
+
+template <class T> struct remove_cv
+{
+  typedef T type;
+};
+template <class T> struct remove_cv<const T>
+{
+  typedef T type;
+};
+template <class T> struct remove_cv<volatile T>
+{
+  typedef T type;
+};
+template <class T> struct remove_cv<const volatile T>
+{
+  typedef T type;
+};
+
+} // namespace detail
+
 template <class ElementType> class span
 {
 public:
   typedef ElementType element_type;
-  typedef ElementType value_type;
+  typedef typename detail::remove_cv<ElementType>::type value_type;
   typedef size_t size_type;
   typedef ptrdiff_t difference_type;
   typedef element_type *pointer;
@@ -38,7 +60,6 @@ public:
   typedef pointer iterator;
   typedef const_pointer const_iterator;
   typedef std::reverse_iterator<iterator> reverse_iterator;
-  typedef std::reverse_iterator<const_iterator> const_reverse_iterator;
 
   span() : data_(NULL), size_(0) {}
 
@@ -64,73 +85,72 @@ public:
 
   span &operator=(const span &other)
   {
-    data_= other.data_;
-    size_= other.size_;
+    data_= other.data();
+    size_= other.size();
     return *this;
   }
 
   template <size_t Count> span<element_type> first() const
   {
     assert(!empty());
-    return span(data_, 1);
+    return span(data(), 1);
   }
   template <size_t Count> span<element_type> last() const
   {
     assert(!empty());
-    return span(data_ + size() - 1, 1);
+    return span(data() + size() - 1, 1);
   }
 
   span<element_type> first(size_type count) const
   {
     assert(!empty());
-    return span(data_, 1);
+    return span(data(), 1);
   }
   span<element_type> last(size_type count) const
   {
     assert(!empty());
-    return span(data_ + size() - 1, 1);
+    return span(data() + size() - 1, 1);
   }
   span<element_type> subspan(size_type offset, size_type count) const
   {
     assert(!empty());
     assert(size() >= offset + count);
-    return span(data_ + offset, count);
+    return span(data() + offset, count);
   }
 
   size_type size() const { return size_; }
-  size_type size_bytes() const { return size_ * sizeof(ElementType); }
-  bool empty() const __attribute__((warn_unused_result)) { return size_ == 0; }
+  size_type size_bytes() const { return size() * sizeof(ElementType); }
+  bool empty() const __attribute__((warn_unused_result))
+  {
+    return size() == 0;
+  }
 
   reference operator[](size_type idx) const
   {
     assert(size() > idx);
-    return data_[idx];
+    return data()[idx];
   }
   reference front() const
   {
     assert(!empty());
-    return data_[0];
+    return data()[0];
   }
   reference back() const
   {
     assert(!empty());
-    return data_[size() - 1];
-  }
-  pointer data() const
-  {
-    assert(!empty());
-    return data_;
+    return data()[size() - 1];
   }
+  pointer data() const { return data_; }
 
   iterator begin() const { return data_; }
   iterator end() const { return data_ + size_; }
   reverse_iterator rbegin() const
   {
-    return std::reverse_iterator<iterator>(std::advance(end(), -1));
+    return std::reverse_iterator<iterator>(end());
   }
   reverse_iterator rend() const
   {
-    return std::reverse_iterator<iterator>(std::advance(begin(), -1));
+    return std::reverse_iterator<iterator>(begin());
   }
 
 private:
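Usage note for the new Atomic_relaxed wrapper in include/my_atomic.h: the sketch below shows the intended style, where plain reads and writes go through the implicit conversion and operator= (both at std::memory_order_relaxed), while read-modify-write stays visible as an explicit fetch_add()/fetch_sub() call, since the class deliberately has no operator++. The counter and function names are made up for illustration; only Atomic_relaxed itself comes from the patch.

#include <stdint.h>
#include "my_atomic.h"

/* Hypothetical statistics counter; zero-initialized explicitly because the
   default constructor leaves the value indeterminate. */
static Atomic_relaxed<uint32_t> busy_threads(0);

void on_thread_start()
{
  /* The read-modify-write overhead stays visible in the caller. */
  busy_threads.fetch_add(1);
}

void on_thread_stop()
{
  busy_threads.fetch_sub(1);
}

uint32_t busy_threads_snapshot()
{
  /* A plain read uses the implicit conversion, a relaxed atomic load. */
  return busy_threads;
}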
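Usage note for the revised TRASH_FILL()/TRASH_ALLOC()/TRASH_FREE() macros in include/my_valgrind.h: the sketch below illustrates the reuse pattern the new comment describes, where an internal allocator hands the same block out again after TRASH_FREE(), so the next TRASH_ALLOC() must first cancel the earlier MEM_NOACCESS(). The one-element free list is hypothetical and assumes a build with TRASH_FREED_MEMORY enabled; only the macros come from my_valgrind.h.

#include <stdlib.h>
#include "my_valgrind.h"

#define BLOCK_SIZE 256

static void *cached_block= NULL;   /* hypothetical one-element free list */

void *pool_alloc(void)
{
  void *p= cached_block ? cached_block : malloc(BLOCK_SIZE);
  cached_block= NULL;
  /* With TRASH_FREED_MEMORY, fills the block with 0xA5 and marks it
     undefined; the MEM_UNDEFINED() inside TRASH_FILL() cancels the
     MEM_NOACCESS() left behind by TRASH_FREE() when the block is reused. */
  TRASH_ALLOC(p, BLOCK_SIZE);
  return p;
}

void pool_free(void *p)
{
  /* Overwrites the block with a dummy pattern and marks it inaccessible. */
  TRASH_FREE(p, BLOCK_SIZE);
  cached_block= p;                 /* keep the block for reuse instead of free() */
}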
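Usage note for the st_::span changes in include/span.h: value_type now strips cv-qualifiers via detail::remove_cv, data() no longer asserts on an empty span, and rbegin()/rend() wrap end()/begin() directly so reverse iteration behaves like std::reverse_iterator expects. The sketch below assumes the pointer-and-length span constructor provided elsewhere in span.h; the buffer and function names are made up for illustration.

#include <assert.h>
#include "span.h"

void span_demo()
{
  const unsigned char buf[4]= {1, 2, 3, 4};
  st_::span<const unsigned char> bytes(buf, 4);

  /* element_type keeps the const; value_type drops it via detail::remove_cv. */
  st_::span<const unsigned char>::value_type first_byte= bytes.front();
  assert(first_byte == 1);

  /* rbegin()/rend() wrap end()/begin(), so this visits 4, 3, 2, 1. */
  unsigned sum= 0;
  for (st_::span<const unsigned char>::reverse_iterator it= bytes.rbegin();
       it != bytes.rend(); ++it)
    sum+= *it;
  assert(sum == 10);

  /* data() no longer asserts, so it is safe to call on an empty span. */
  st_::span<const unsigned char> empty_bytes;
  assert(empty_bytes.data() == NULL);
}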