From 17d4cb5479fb4bea133f8556eba6781d3707eeab Mon Sep 17 00:00:00 2001 From: Tim Blechmann Date: Tue, 21 May 2013 09:54:09 +0000 Subject: [PATCH] atomic: merge fixes from trunk [SVN r84400] --- include/boost/atomic/detail/base.hpp | 67 +++++++++- include/boost/atomic/detail/cas32strong.hpp | 6 +- include/boost/atomic/detail/cas32weak.hpp | 20 ++- include/boost/atomic/detail/cas64strong.hpp | 6 +- include/boost/atomic/detail/gcc-ppc.hpp | 78 ++++++++++- include/boost/atomic/detail/gcc-sparcv9.hpp | 20 ++- include/boost/atomic/detail/gcc-x86.hpp | 139 +++++++++++++++++--- include/boost/atomic/detail/lockpool.hpp | 1 + include/boost/atomic/detail/windows.hpp | 18 ++- test/api_test_helpers.hpp | 35 +++-- test/lockfree.cpp | 6 +- 11 files changed, 352 insertions(+), 44 deletions(-) diff --git a/include/boost/atomic/detail/base.hpp b/include/boost/atomic/detail/base.hpp index c0c4549..54dac60 100644 --- a/include/boost/atomic/detail/base.hpp +++ b/include/boost/atomic/detail/base.hpp @@ -79,6 +79,43 @@ return fetch_sub(v) - v; \ } \ +#define BOOST_ATOMIC_DECLARE_VOID_POINTER_ADDITIVE_OPERATORS \ + value_type \ + operator++(int) volatile BOOST_NOEXCEPT \ + { \ + return fetch_add(1); \ + } \ + \ + value_type \ + operator++(void) volatile BOOST_NOEXCEPT \ + { \ + return (char*)fetch_add(1) + 1; \ + } \ + \ + value_type \ + operator--(int) volatile BOOST_NOEXCEPT \ + { \ + return fetch_sub(1); \ + } \ + \ + value_type \ + operator--(void) volatile BOOST_NOEXCEPT \ + { \ + return (char*)fetch_sub(1) - 1; \ + } \ + \ + value_type \ + operator+=(difference_type v) volatile BOOST_NOEXCEPT \ + { \ + return (char*)fetch_add(v) + v; \ + } \ + \ + value_type \ + operator-=(difference_type v) volatile BOOST_NOEXCEPT \ + { \ + return (char*)fetch_sub(v) - v; \ + } \ + #define BOOST_ATOMIC_DECLARE_BIT_OPERATORS \ value_type \ operator&=(difference_type v) volatile BOOST_NOEXCEPT \ @@ -102,6 +139,10 @@ BOOST_ATOMIC_DECLARE_BASE_OPERATORS \ BOOST_ATOMIC_DECLARE_ADDITIVE_OPERATORS \ 
+#define BOOST_ATOMIC_DECLARE_VOID_POINTER_OPERATORS \ + BOOST_ATOMIC_DECLARE_BASE_OPERATORS \ + BOOST_ATOMIC_DECLARE_VOID_POINTER_ADDITIVE_OPERATORS \ + #define BOOST_ATOMIC_DECLARE_INTEGRAL_OPERATORS \ BOOST_ATOMIC_DECLARE_BASE_OPERATORS \ BOOST_ATOMIC_DECLARE_ADDITIVE_OPERATORS \ @@ -444,6 +485,7 @@ class base_atomic { private: typedef base_atomic this_type; + typedef ptrdiff_t difference_type; typedef void * value_type; typedef lockpool::scoped_lock guard_type; public: @@ -506,7 +548,30 @@ public: return false; } - BOOST_ATOMIC_DECLARE_BASE_OPERATORS + value_type fetch_add(difference_type v, memory_order /*order*/ = memory_order_seq_cst) volatile BOOST_NOEXCEPT + { + guard_type guard(const_cast(&v_)); + + value_type old = v_; + char * cv = reinterpret_cast(old); + cv += v; + v_ = cv; + return old; + } + + value_type fetch_sub(difference_type v, memory_order /*order*/ = memory_order_seq_cst) volatile + { + guard_type guard(const_cast(&v_)); + + value_type old = v_; + char * cv = reinterpret_cast(old); + cv -= v; + v_ = cv; + return old; + } + + BOOST_ATOMIC_DECLARE_VOID_POINTER_OPERATORS + private: base_atomic(const base_atomic &) /* = delete */ ; void operator=(const base_atomic &) /* = delete */ ; diff --git a/include/boost/atomic/detail/cas32strong.hpp b/include/boost/atomic/detail/cas32strong.hpp index 909a4a4..ac66a12 100644 --- a/include/boost/atomic/detail/cas32strong.hpp +++ b/include/boost/atomic/detail/cas32strong.hpp @@ -479,7 +479,7 @@ public: { value_type original = load(memory_order_relaxed); do { - } while (!compare_exchange_weak(original, original + v, order, memory_order_relaxed)); + } while (!compare_exchange_weak(original, (char*)original + v, order, memory_order_relaxed)); return original; } @@ -488,7 +488,7 @@ public: { value_type original = load(memory_order_relaxed); do { - } while (!compare_exchange_weak(original, original - v, order, memory_order_relaxed)); + } while (!compare_exchange_weak(original, (char*)original - v, order, 
memory_order_relaxed)); return original; } @@ -498,7 +498,7 @@ public: return true; } - BOOST_ATOMIC_DECLARE_BASE_OPERATORS + BOOST_ATOMIC_DECLARE_VOID_POINTER_OPERATORS private: base_atomic(const base_atomic &) /* = delete */ ; void operator=(const base_atomic &) /* = delete */ ; diff --git a/include/boost/atomic/detail/cas32weak.hpp b/include/boost/atomic/detail/cas32weak.hpp index a808e31..de2314c 100644 --- a/include/boost/atomic/detail/cas32weak.hpp +++ b/include/boost/atomic/detail/cas32weak.hpp @@ -509,7 +509,25 @@ public: return true; } - BOOST_ATOMIC_DECLARE_BASE_OPERATORS + value_type + fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT + { + value_type original = load(memory_order_relaxed); + do { + } while (!compare_exchange_weak(original, (char*)original + v, order, memory_order_relaxed)); + return original; + } + + value_type + fetch_sub(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT + { + value_type original = load(memory_order_relaxed); + do { + } while (!compare_exchange_weak(original, (char*)original - v, order, memory_order_relaxed)); + return original; + } + + BOOST_ATOMIC_DECLARE_VOID_POINTER_OPERATORS private: base_atomic(const base_atomic &) /* = delete */ ; void operator=(const base_atomic &) /* = delete */ ; diff --git a/include/boost/atomic/detail/cas64strong.hpp b/include/boost/atomic/detail/cas64strong.hpp index 41cb522..0a5002b 100644 --- a/include/boost/atomic/detail/cas64strong.hpp +++ b/include/boost/atomic/detail/cas64strong.hpp @@ -223,7 +223,7 @@ public: { value_type original = load(memory_order_relaxed); do { - } while (!compare_exchange_weak(original, original + v, order, memory_order_relaxed)); + } while (!compare_exchange_weak(original, (char*)original + v, order, memory_order_relaxed)); return original; } @@ -232,7 +232,7 @@ public: { value_type original = load(memory_order_relaxed); do { - } while (!compare_exchange_weak(original, original - 
v, order, memory_order_relaxed)); + } while (!compare_exchange_weak(original, (char*)original - v, order, memory_order_relaxed)); return original; } @@ -242,7 +242,7 @@ public: return true; } - BOOST_ATOMIC_DECLARE_BASE_OPERATORS + BOOST_ATOMIC_DECLARE_VOID_POINTER_OPERATORS private: base_atomic(const base_atomic &) /* = delete */ ; void operator=(const base_atomic &) /* = delete */ ; diff --git a/include/boost/atomic/detail/gcc-ppc.hpp b/include/boost/atomic/detail/gcc-ppc.hpp index ed975d7..aaeeb96 100644 --- a/include/boost/atomic/detail/gcc-ppc.hpp +++ b/include/boost/atomic/detail/gcc-ppc.hpp @@ -1525,6 +1525,7 @@ template class base_atomic { typedef base_atomic this_type; + typedef ptrdiff_t difference_type; typedef void * value_type; public: BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {} @@ -1643,7 +1644,43 @@ public: return true; } - BOOST_ATOMIC_DECLARE_BASE_OPERATORS + value_type + fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT + { + value_type original, tmp; + ppc_fence_before(order); + __asm__ ( + "1:\n" + "lwarx %0,%y2\n" + "add %1,%0,%3\n" + "stwcx. %1,%y2\n" + "bne- 1b\n" + : "=&b" (original), "=&b" (tmp), "+Z"(v_) + : "b" (v) + : "cc"); + ppc_fence_after(order); + return original; + } + + value_type + fetch_sub(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT + { + value_type original, tmp; + ppc_fence_before(order); + __asm__ ( + "1:\n" + "lwarx %0,%y2\n" + "sub %1,%0,%3\n" + "stwcx. 
%1,%y2\n" + "bne- 1b\n" + : "=&b" (original), "=&b" (tmp), "+Z"(v_) + : "b" (v) + : "cc"); + ppc_fence_after(order); + return original; + } + + BOOST_ATOMIC_DECLARE_VOID_POINTER_OPERATORS private: base_atomic(const base_atomic &) /* = delete */ ; void operator=(const base_atomic &) /* = delete */ ; @@ -1824,6 +1861,7 @@ template class base_atomic { typedef base_atomic this_type; + typedef ptrdiff_t difference_type; typedef void * value_type; public: BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {} @@ -1942,7 +1980,43 @@ public: return true; } - BOOST_ATOMIC_DECLARE_BASE_OPERATORS + value_type + fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT + { + value_type original, tmp; + ppc_fence_before(order); + __asm__ ( + "1:\n" + "ldarx %0,%y2\n" + "add %1,%0,%3\n" + "stdcx. %1,%y2\n" + "bne- 1b\n" + : "=&b" (original), "=&b" (tmp), "+Z"(v_) + : "b" (v) + : "cc"); + ppc_fence_after(order); + return original; + } + + value_type + fetch_sub(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT + { + value_type original, tmp; + ppc_fence_before(order); + __asm__ ( + "1:\n" + "ldarx %0,%y2\n" + "sub %1,%0,%3\n" + "stdcx. 
%1,%y2\n" + "bne- 1b\n" + : "=&b" (original), "=&b" (tmp), "+Z"(v_) + : "b" (v) + : "cc"); + ppc_fence_after(order); + return original; + } + + BOOST_ATOMIC_DECLARE_VOID_POINTER_OPERATORS private: base_atomic(const base_atomic &) /* = delete */ ; void operator=(const base_atomic &) /* = delete */ ; diff --git a/include/boost/atomic/detail/gcc-sparcv9.hpp b/include/boost/atomic/detail/gcc-sparcv9.hpp index 81fe729..b524403 100644 --- a/include/boost/atomic/detail/gcc-sparcv9.hpp +++ b/include/boost/atomic/detail/gcc-sparcv9.hpp @@ -792,6 +792,7 @@ template class base_atomic { typedef base_atomic this_type; + typedef ptrdiff_t difference_type; typedef void * value_type; public: BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {} @@ -857,7 +858,24 @@ public: return true; } - BOOST_ATOMIC_DECLARE_BASE_OPERATORS + value_type + fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT + { + value_type tmp = load(memory_order_relaxed); + do {} while(!compare_exchange_weak(tmp, (char*)tmp + v, order, memory_order_relaxed)); + return tmp; + } + + value_type + fetch_sub(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT + { + value_type tmp = load(memory_order_relaxed); + do {} while(!compare_exchange_weak(tmp, (char*)tmp - v, order, memory_order_relaxed)); + return tmp; + } + + BOOST_ATOMIC_DECLARE_VOID_POINTER_OPERATORS + private: base_atomic(const base_atomic &) /* = delete */ ; void operator=(const base_atomic &) /* = delete */ ; diff --git a/include/boost/atomic/detail/gcc-x86.hpp b/include/boost/atomic/detail/gcc-x86.hpp index 78d8caf..1cf9d67 100644 --- a/include/boost/atomic/detail/gcc-x86.hpp +++ b/include/boost/atomic/detail/gcc-x86.hpp @@ -28,6 +28,15 @@ namespace detail { #define BOOST_ATOMIC_X86_PAUSE() __asm__ __volatile__ ("pause\n") +#if defined(__i386__) &&\ + (\ + defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_8) ||\ + defined(__i586__) || defined(__i686__) || 
defined(__pentium4__) || defined(__nocona__) || defined(__core2__) || defined(__corei7__) ||\ + defined(__k6__) || defined(__athlon__) || defined(__k8__) || defined(__amdfam10__) || defined(__bdver1__) || defined(__bdver2__) || defined(__bdver3__) || defined(__btver1__) || defined(__btver2__)\ + ) +#define BOOST_ATOMIC_X86_HAS_CMPXCHG8B 1 +#endif + inline void platform_fence_before(memory_order order) { @@ -198,10 +207,10 @@ public: #define BOOST_ATOMIC_INT_LOCK_FREE 2 #define BOOST_ATOMIC_LONG_LOCK_FREE 2 -#if defined(__x86_64__) +#if defined(__x86_64__) || defined(BOOST_ATOMIC_X86_HAS_CMPXCHG8B) #define BOOST_ATOMIC_LLONG_LOCK_FREE 2 #else -#define BOOST_ATOMIC_LLONG_LOCK_FREE 1 +#define BOOST_ATOMIC_LLONG_LOCK_FREE 0 #endif #define BOOST_ATOMIC_POINTER_LOCK_FREE 2 @@ -808,6 +817,7 @@ template class base_atomic { typedef base_atomic this_type; + typedef ptrdiff_t difference_type; typedef void * value_type; public: BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {} @@ -875,7 +885,25 @@ public: return true; } - BOOST_ATOMIC_DECLARE_BASE_OPERATORS + value_type + fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT + { + platform_fence_before(order); + __asm__ ( + "lock ; xaddl %0, %1" + : "+r" (v), "+m" (v_) + ); + platform_fence_after(order); + return reinterpret_cast(v); + } + + value_type + fetch_sub(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT + { + return fetch_add(-v, order); + } + + BOOST_ATOMIC_DECLARE_VOID_POINTER_OPERATORS private: base_atomic(const base_atomic &) /* = delete */ ; void operator=(const base_atomic &) /* = delete */ ; @@ -994,6 +1022,7 @@ template class base_atomic { typedef base_atomic this_type; + typedef ptrdiff_t difference_type; typedef void * value_type; public: BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT : v_(v) {} @@ -1061,7 +1090,25 @@ public: return true; } - BOOST_ATOMIC_DECLARE_BASE_OPERATORS + 
value_type + fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT + { + platform_fence_before(order); + __asm__ ( + "lock ; xaddq %0, %1" + : "+r" (v), "+m" (v_) + ); + platform_fence_after(order); + return reinterpret_cast(v); + } + + value_type + fetch_sub(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT + { + return fetch_add(-v, order); + } + + BOOST_ATOMIC_DECLARE_VOID_POINTER_OPERATORS private: base_atomic(const base_atomic &) /* = delete */ ; void operator=(const base_atomic &) /* = delete */ ; @@ -1580,11 +1627,11 @@ private: }; #endif -#if !defined(__x86_64__) && (defined(__i686__) || defined (__GCC_HAVE_SYNC_COMPARE_AND_SWAP_8)) +#if !defined(__x86_64__) && defined(BOOST_ATOMIC_X86_HAS_CMPXCHG8B) template inline bool -platform_cmpxchg64_strong(T & expected, T desired, volatile T * ptr) +platform_cmpxchg64_strong(T & expected, T desired, volatile T * ptr) BOOST_NOEXCEPT { #ifdef __GCC_HAVE_SYNC_COMPARE_AND_SWAP_8 const T oldval = __sync_val_compare_and_swap(ptr, expected, desired); @@ -1592,7 +1639,7 @@ platform_cmpxchg64_strong(T & expected, T desired, volatile T * ptr) expected = oldval; return result; #else - int scratch; + uint32_t scratch; T prev = expected; /* Make sure ebx is saved and restored properly in case this object is compiled as "position independent". Since @@ -1614,7 +1661,7 @@ platform_cmpxchg64_strong(T & expected, T desired, volatile T * ptr) "lock; cmpxchg8b 0(%4)\n" "movl %1, %%ebx\n" : "=A" (prev), "=m" (scratch) - : "D" ((int)desired), "c" ((int)(desired >> 32)), "S" (ptr), "0" (prev) + : "D" ((uint32_t)desired), "c" ((uint32_t)(desired >> 32)), "S" (ptr), "0" (prev) : "memory"); bool success = (prev == expected); expected = prev; @@ -1622,14 +1669,47 @@ platform_cmpxchg64_strong(T & expected, T desired, volatile T * ptr) #endif } +// Intel 64 and IA-32 Architectures Software Developer's Manual, Volume 3A, 8.1.1. 
Guaranteed Atomic Operations: +// +// The Pentium processor (and newer processors since) guarantees that the following additional memory operations will always be carried out atomically: +// * Reading or writing a quadword aligned on a 64-bit boundary +// +// Luckily, the memory is almost always 8-byte aligned in our case because atomic<> uses 64 bit native types for storage and dynamic memory allocations +// have at least 8 byte alignment. The only unfortunate case is when atomic is placed on the stack and it is not 8-byte aligned (like on 32 bit Windows). + template inline void -platform_store64(T value, volatile T * ptr) +platform_store64(T value, volatile T * ptr) BOOST_NOEXCEPT { - T expected = *ptr; - for (; !platform_cmpxchg64_strong(expected, value, ptr);) + if (((uint32_t)ptr & 0x00000007) == 0) { - BOOST_ATOMIC_X86_PAUSE(); +#if defined(__SSE2__) + __asm__ __volatile__ + ( + "movq %1, %%xmm0\n\t" + "movq %%xmm0, %0\n\t" + : "=m" (*ptr) + : "m" (value) + : "memory", "xmm0" + ); +#else + __asm__ __volatile__ + ( + "fildll %1\n\t" + "fistpll %0\n\t" + : "=m" (*ptr) + : "m" (value) + : "memory" + ); +#endif + } + else + { + T expected = *ptr; + while (!platform_cmpxchg64_strong(expected, value, ptr)) + { + BOOST_ATOMIC_X86_PAUSE(); + } } } @@ -1637,12 +1717,37 @@ template inline T platform_load64(const volatile T * ptr) BOOST_NOEXCEPT { - T expected = *ptr; - for (; !platform_cmpxchg64_strong(expected, expected, const_cast(ptr));) + T value = T(); + + if (((uint32_t)ptr & 0x00000007) == 0) { - BOOST_ATOMIC_X86_PAUSE(); +#if defined(__SSE2__) + __asm__ __volatile__ + ( + "movq %1, %%xmm0\n\t" + "movq %%xmm0, %0\n\t" + : "=m" (value) + : "m" (*ptr) + : "memory", "xmm0" + ); +#else + __asm__ __volatile__ + ( + "fildll %1\n\t" + "fistpll %0\n\t" + : "=m" (value) + : "m" (*ptr) + : "memory" + ); +#endif } - return expected; + else + { + // We don't care for comparison result here; the previous value will be stored into value anyway. 
+ platform_cmpxchg64_strong(value, value, const_cast(ptr)); + } + + return value; } #endif @@ -1652,7 +1757,7 @@ platform_load64(const volatile T * ptr) BOOST_NOEXCEPT } /* pull in 64-bit atomic type using cmpxchg8b above */ -#if !defined(__x86_64__) && (defined(__i686__) || defined (__GCC_HAVE_SYNC_COMPARE_AND_SWAP_8)) +#if !defined(__x86_64__) && defined(BOOST_ATOMIC_X86_HAS_CMPXCHG8B) #include #endif diff --git a/include/boost/atomic/detail/lockpool.hpp b/include/boost/atomic/detail/lockpool.hpp index ea96cd2..b86cfae 100644 --- a/include/boost/atomic/detail/lockpool.hpp +++ b/include/boost/atomic/detail/lockpool.hpp @@ -61,6 +61,7 @@ public: { private: atomic_flag& flag_; + uint8_t padding[128 - sizeof(atomic_flag)]; scoped_lock(const scoped_lock &) /* = delete */; scoped_lock& operator=(const scoped_lock &) /* = delete */; diff --git a/include/boost/atomic/detail/windows.hpp b/include/boost/atomic/detail/windows.hpp index e01c6a5..0fa9712 100644 --- a/include/boost/atomic/detail/windows.hpp +++ b/include/boost/atomic/detail/windows.hpp @@ -878,6 +878,7 @@ template class base_atomic { typedef base_atomic this_type; + typedef ptrdiff_t difference_type; typedef void* value_type; public: BOOST_CONSTEXPR explicit base_atomic(value_type v) BOOST_NOEXCEPT: v_(v) {} @@ -938,7 +939,22 @@ public: return true; } - BOOST_ATOMIC_DECLARE_BASE_OPERATORS + value_type + fetch_add(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT + { + platform_fence_before(order); + value_type res = (value_type)BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD_POINTER(&v_, v); + platform_fence_after(order); + return res; + } + + value_type + fetch_sub(difference_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT + { + return fetch_add(-v, order); + } + + BOOST_ATOMIC_DECLARE_VOID_POINTER_OPERATORS private: base_atomic(const base_atomic &) /* = delete */ ; void operator=(const base_atomic &) /* = delete */ ; diff --git a/test/api_test_helpers.hpp 
b/test/api_test_helpers.hpp index ec1fbec..f9dbf36 100644 --- a/test/api_test_helpers.hpp +++ b/test/api_test_helpers.hpp @@ -111,8 +111,8 @@ void test_constexpr_ctor() #endif } -template -void test_additive_operators(T value, D delta) +template +void test_additive_operators_with_type(T value, D delta) { /* note: the tests explicitly cast the result of any addition to the type to be tested to force truncation of the result to @@ -122,14 +122,14 @@ void test_additive_operators(T value, D delta) { boost::atomic a(value); T n = a.fetch_add(delta); - BOOST_CHECK( a.load() == T(value + delta) ); + BOOST_CHECK( a.load() == T((AddType)value + delta) ); BOOST_CHECK( n == value ); } { boost::atomic a(value); T n = a.fetch_sub(delta); - BOOST_CHECK( a.load() == T(value - delta) ); + BOOST_CHECK( a.load() == T((AddType)value - delta) ); BOOST_CHECK( n == value ); } @@ -137,47 +137,53 @@ void test_additive_operators(T value, D delta) { boost::atomic a(value); T n = (a += delta); - BOOST_CHECK( a.load() == T(value + delta) ); - BOOST_CHECK( n == T(value + delta) ); + BOOST_CHECK( a.load() == T((AddType)value + delta) ); + BOOST_CHECK( n == T((AddType)value + delta) ); } { boost::atomic a(value); T n = (a -= delta); - BOOST_CHECK( a.load() == T(value - delta) ); - BOOST_CHECK( n == T(value - delta) ); + BOOST_CHECK( a.load() == T((AddType)value - delta) ); + BOOST_CHECK( n == T((AddType)value - delta) ); } /* overloaded increment/decrement */ { boost::atomic a(value); T n = a++; - BOOST_CHECK( a.load() == T(value + 1) ); + BOOST_CHECK( a.load() == T((AddType)value + 1) ); BOOST_CHECK( n == value ); } { boost::atomic a(value); T n = ++a; - BOOST_CHECK( a.load() == T(value + 1) ); - BOOST_CHECK( n == T(value + 1) ); + BOOST_CHECK( a.load() == T((AddType)value + 1) ); + BOOST_CHECK( n == T((AddType)value + 1) ); } { boost::atomic a(value); T n = a--; - BOOST_CHECK( a.load() == T(value - 1) ); + BOOST_CHECK( a.load() == T((AddType)value - 1) ); BOOST_CHECK( n == value ); } { 
boost::atomic a(value); T n = --a; - BOOST_CHECK( a.load() == T(value - 1) ); - BOOST_CHECK( n == T(value - 1) ); + BOOST_CHECK( a.load() == T((AddType)value - 1) ); + BOOST_CHECK( n == T((AddType)value - 1) ); } } +template +void test_additive_operators(T value, D delta) +{ + test_additive_operators_with_type(value, delta); +} + template void test_additive_wrap(T value) { @@ -275,6 +281,7 @@ void test_pointer_api(void) test_additive_operators(&values[1], 1); test_base_operators(&values[0], &values[1], &values[2]); + test_additive_operators_with_type(&values[1], 1); boost::atomic ptr; boost::atomic integral; diff --git a/test/lockfree.cpp b/test/lockfree.cpp index 8aa54e1..0b030ea 100644 --- a/test/lockfree.cpp +++ b/test/lockfree.cpp @@ -43,7 +43,11 @@ verify_lock_free(const char * type_name, int lock_free_macro_val, int lock_free_ #define EXPECT_SHORT_LOCK_FREE 2 #define EXPECT_INT_LOCK_FREE 2 #define EXPECT_LONG_LOCK_FREE 2 -#define EXPECT_LLONG_LOCK_FREE 1 +#if defined(BOOST_ATOMIC_X86_HAS_CMPXCHG8B) +#define EXPECT_LLONG_LOCK_FREE 2 +#else +#define EXPECT_LLONG_LOCK_FREE 0 +#endif #define EXPECT_POINTER_LOCK_FREE 2 #define EXPECT_BOOL_LOCK_FREE 2