mirror of https://github.com/boostorg/atomic.git

Reverted commit 86144, which was misapplied and not required. The support for 64-bit atomic ops on 32-bit x86 was already present before.

[SVN r86320]
This commit is contained in:
Andrey Semashev
2013-10-15 11:08:22 +00:00
parent 9606e3f321
commit 94359f6aea
2 changed files with 0 additions and 31 deletions
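
For context: 32-bit x86 already provides 64-bit compare-and-swap via the lock cmpxchg8b instruction, which MSVC exposes as the _InterlockedCompareExchange64 intrinsic even when targeting 32-bit Windows, so any 64-bit read-modify-write can be built as a CAS loop. The sketch below is a hypothetical illustration of that point, not code from the library; the helper name fetch_add64 is made up.

#include <intrin.h>

#pragma intrinsic(_InterlockedCompareExchange64)

// Hypothetical helper: a 64-bit atomic fetch-and-add for 32-bit x86 MSVC,
// built entirely on the cmpxchg8b-based compare-exchange intrinsic.
inline __int64 fetch_add64(__int64 volatile* target, __int64 value)
{
    __int64 expected = *target; // plain read; only a starting guess for the loop
    for (;;)
    {
        // Try to replace 'expected' with 'expected + value'. The intrinsic
        // returns the value that was actually found in *target.
        __int64 observed = _InterlockedCompareExchange64(target, expected + value, expected);
        if (observed == expected)
            return observed; // the addition was applied atomically
        expected = observed; // another thread got in first; retry with the fresh value
    }
}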

View File

@@ -107,14 +107,10 @@
 #else // defined(_M_AMD64) || defined(_M_IA64)
-#pragma intrinsic(_InterlockedCompareExchange64)
 #define BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE_POINTER(dest, exchange, compare) ((void*)_InterlockedCompareExchange((long*)(dest), (long)(exchange), (long)(compare)))
 #define BOOST_ATOMIC_INTERLOCKED_EXCHANGE_POINTER(dest, newval) ((void*)_InterlockedExchange((long*)(dest), (long)(newval)))
 #define BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD_POINTER(dest, byte_offset) ((void*)BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD((long*)(dest), byte_offset))
-#define BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE64(dest, exchange, compare) _InterlockedCompareExchange64((__int64*)(dest), (__int64)(exchange), (__int64)(compare))
 #endif // defined(_M_AMD64) || defined(_M_IA64)
 #else // defined(_MSC_VER) && _MSC_VER >= 1400
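
The lines kept in this 32-bit branch map the pointer-sized operations onto the ordinary 32-bit interlocked intrinsics, which works because a pointer and a long are both 32 bits wide on 32-bit Windows. A standalone equivalent of the retained compare-exchange macro might look like the following (the function name is hypothetical, for illustration only):

#include <intrin.h>

// Hypothetical standalone equivalent of BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE_POINTER
// on 32-bit MSVC: cast the pointer operands through long and reuse the 32-bit CAS intrinsic.
inline void* compare_exchange_pointer(void* volatile* dest, void* exchange, void* compare)
{
    return (void*)_InterlockedCompareExchange(
        (long volatile*)dest, (long)exchange, (long)compare);
}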

View File

@@ -1310,19 +1310,10 @@ public:
 value_type
 fetch_add(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
 {
-#if defined(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64)
     platform_fence_before(order);
     v = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE_ADD64(&v_, v));
     platform_fence_after(order);
     return v;
-#else
-    value_type tmp = load(memory_order_relaxed);
-    for (; !compare_exchange_weak(tmp, tmp + v, order, memory_order_relaxed);)
-    {
-        BOOST_ATOMIC_X86_PAUSE();
-    }
-    return tmp;
-#endif
 }
 value_type
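
The #else branch deleted above implemented fetch_add as a compare_exchange_weak loop, pausing between failed attempts. Expressed in portable C++11 terms with std::atomic (a sketch for illustration; the real member function operates on its own v_ field and Boost macros), the same fallback pattern looks like this:

#include <atomic>
#include <cstdint>
#include <emmintrin.h> // _mm_pause

// Sketch of the removed CAS-loop fallback: keep trying to swap in old + value
// until no other thread has modified the variable in between.
inline std::uint64_t fetch_add_via_cas(std::atomic<std::uint64_t>& a, std::uint64_t value)
{
    std::uint64_t old = a.load(std::memory_order_relaxed);
    while (!a.compare_exchange_weak(old, old + value,
                                    std::memory_order_seq_cst,
                                    std::memory_order_relaxed))
    {
        _mm_pause(); // like BOOST_ATOMIC_X86_PAUSE(): ease contention while spinning
    }
    return old; // compare_exchange_weak refreshes 'old' on failure, so this is the pre-add value
}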
@@ -1335,19 +1326,10 @@ public:
 value_type
 exchange(value_type v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
 {
-#if defined(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64)
     platform_fence_before(order);
     v = static_cast< value_type >(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64(&v_, v));
     platform_fence_after(order);
     return v;
-#else
-    value_type tmp = load(memory_order_relaxed);
-    for (; !compare_exchange_weak(tmp, v, order, memory_order_relaxed);)
-    {
-        BOOST_ATOMIC_X86_PAUSE();
-    }
-    return tmp;
-#endif
 }
 bool
@@ -1492,7 +1474,6 @@ public:
 value_type
 exchange(value_type const& v, memory_order order = memory_order_seq_cst) volatile BOOST_NOEXCEPT
 {
-#if defined(BOOST_ATOMIC_INTERLOCKED_EXCHANGE64)
     storage_type tmp = 0;
     memcpy(&tmp, &v, sizeof(value_type));
     platform_fence_before(order);
@@ -1501,14 +1482,6 @@ public:
     value_type res;
     memcpy(&res, &tmp, sizeof(value_type));
     return res;
-#else
-    value_type cur = load(memory_order_relaxed);
-    for (; !compare_exchange_weak(cur, v, order, memory_order_relaxed);)
-    {
-        BOOST_ATOMIC_X86_PAUSE();
-    }
-    return cur;
-#endif
 }
 bool
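
The path kept in this last hunk handles a non-integral value_type by bit-copying it into an integral storage_type, exchanging that storage word atomically, and copying the previous bits back out. A generic sketch of the technique using std::atomic (illustrative only; the names and the fixed 64-bit storage word are assumptions, not the library's API):

#include <atomic>
#include <cstdint>
#include <cstring>

// Sketch of the exchange-through-storage technique: works for trivially copyable
// value types no larger than the storage word.
template <typename T>
T exchange_via_storage(std::atomic<std::uint64_t>& storage, T const& desired)
{
    static_assert(sizeof(T) <= sizeof(std::uint64_t), "value must fit in the storage word");
    std::uint64_t tmp = 0;
    std::memcpy(&tmp, &desired, sizeof(T));   // pack the new value into the storage word
    std::uint64_t prev = storage.exchange(tmp, std::memory_order_seq_cst);
    T res;
    std::memcpy(&res, &prev, sizeof(T));      // unpack the previous value
    return res;
}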