2
0
mirror of https://github.com/boostorg/atomic.git synced 2026-02-02 08:22:08 +00:00

Added MSVC backend for extra operations.

This commit is contained in:
Andrey Semashev
2017-07-10 21:38:16 +03:00
parent 3622ce85c7
commit 538b411c4a
6 changed files with 976 additions and 3 deletions

View File

@@ -0,0 +1,27 @@
/*
* Distributed under the Boost Software License, Version 1.0.
* (See accompanying file LICENSE_1_0.txt or copy at
* http://www.boost.org/LICENSE_1_0.txt)
*
* Copyright (c) 2017 Andrey Semashev
*/
/*!
* \file atomic/detail/extra_operations.hpp
*
* This header defines extra atomic operations, including the generic version.
*/
#ifndef BOOST_ATOMIC_DETAIL_EXTRA_OPERATIONS_HPP_INCLUDED_
#define BOOST_ATOMIC_DETAIL_EXTRA_OPERATIONS_HPP_INCLUDED_
// The generic backend is always included first; it provides the fallback implementation.
#include <boost/atomic/detail/extra_ops_generic.hpp>
// If a platform-specific backend was selected (i.e. BOOST_ATOMIC_DETAIL_EXTRA_BACKEND_GENERIC
// is not defined -- presumably set by config or the generic header; confirm there), include it
// via the BOOST_ATOMIC_DETAIL_EXTRA_BACKEND_HEADER macro, which composes the header path.
#if !defined(BOOST_ATOMIC_DETAIL_EXTRA_BACKEND_GENERIC)
#include BOOST_ATOMIC_DETAIL_EXTRA_BACKEND_HEADER(boost/atomic/detail/extra_ops_)
#endif
#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif
#endif // BOOST_ATOMIC_DETAIL_EXTRA_OPERATIONS_HPP_INCLUDED_

View File

@@ -0,0 +1,35 @@
/*
* Distributed under the Boost Software License, Version 1.0.
* (See accompanying file LICENSE_1_0.txt or copy at
* http://www.boost.org/LICENSE_1_0.txt)
*
* Copyright (c) 2017 Andrey Semashev
*/
/*!
* \file atomic/detail/extra_operations_fwd.hpp
*
* This header contains forward declaration of the \c extra_operations template.
*/
#ifndef BOOST_ATOMIC_DETAIL_EXTRA_OPERATIONS_FWD_HPP_INCLUDED_
#define BOOST_ATOMIC_DETAIL_EXTRA_OPERATIONS_FWD_HPP_INCLUDED_
#include <cstddef>
#include <boost/atomic/detail/config.hpp>
#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif
namespace boost {
namespace atomics {
namespace detail {
// Primary template of the extra operations. Platform backends partially
// specialize it on the storage Size (see extra_ops_msvc_x86.hpp and friends);
// the unspecialized definition lives in extra_ops_generic.hpp.
template< typename Base, std::size_t Size, bool Signed >
struct extra_operations;
} // namespace detail
} // namespace atomics
} // namespace boost
#endif // BOOST_ATOMIC_DETAIL_EXTRA_OPERATIONS_FWD_HPP_INCLUDED_

View File

@@ -29,9 +29,9 @@ namespace boost {
namespace atomics {
namespace detail {
// Default extra_operations template definition will be used unless specialized for a specific platform
//! Generic implementation of extra operations
template< typename Base, std::size_t Size, bool Signed >
struct extra_operations :
struct generic_extra_operations :
public Base
{
typedef Base base_type;
@@ -138,6 +138,13 @@ struct extra_operations :
}
};
// Default extra_operations template definition will be used unless specialized for a specific platform.
// It simply forwards everything to the generic implementation above; platform backend headers
// provide partial specializations of extra_operations for the storage sizes they accelerate.
template< typename Base, std::size_t Size, bool Signed >
struct extra_operations :
public generic_extra_operations< Base, Size, Signed >
{
};
} // namespace detail
} // namespace atomics
} // namespace boost

View File

@@ -0,0 +1,106 @@
/*
* Distributed under the Boost Software License, Version 1.0.
* (See accompanying file LICENSE_1_0.txt or copy at
* http://www.boost.org/LICENSE_1_0.txt)
*
* Copyright (c) 2017 Andrey Semashev
*/
/*!
* \file atomic/detail/extra_ops_msvc_arm.hpp
*
* This header contains implementation of the extra atomic operations for ARM.
*/
#ifndef BOOST_ATOMIC_DETAIL_EXTRA_OPS_MSVC_ARM_HPP_INCLUDED_
#define BOOST_ATOMIC_DETAIL_EXTRA_OPS_MSVC_ARM_HPP_INCLUDED_
#include <cstddef>
#include <boost/memory_order.hpp>
#include <boost/atomic/detail/config.hpp>
#include <boost/atomic/detail/interlocked.hpp>
#include <boost/atomic/detail/storage_type.hpp>
#include <boost/atomic/detail/extra_operations_fwd.hpp>
#include <boost/atomic/detail/extra_ops_generic.hpp>
#include <boost/atomic/capabilities.hpp>
#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif
namespace boost {
namespace atomics {
namespace detail {
#if defined(BOOST_ATOMIC_INTERLOCKED_BTS) && defined(BOOST_ATOMIC_INTERLOCKED_BTR)
//! Extra operations for 32-bit storage, based on the MSVC
//! _interlockedbittestandset/_interlockedbittestandreset intrinsic families.
//! When the ordering-specific (_nf/_acq/_rel) variants are available, the
//! weakest intrinsic satisfying the requested memory_order is selected.
//!
//! NOTE: Only Base and Signed are declared as template parameters. Declaring
//! a Size parameter as well (as the primary template does) would make it
//! non-deducible from the argument list < Base, 4u, Signed >, rendering this
//! partial specialization ill-formed -- it could never be matched. The x86
//! backend's specializations use the same two-parameter form.
template< typename Base, bool Signed >
struct extra_operations< Base, 4u, Signed > :
    public generic_extra_operations< Base, 4u, Signed >
{
    typedef generic_extra_operations< Base, 4u, Signed > base_type;
    typedef typename base_type::storage_type storage_type;

    //! Atomically sets the bit at \c bit_number; returns the bit's previous value.
    static BOOST_FORCEINLINE bool bit_test_and_set(storage_type volatile& storage, unsigned int bit_number, memory_order order) BOOST_NOEXCEPT
    {
#if defined(BOOST_ATOMIC_INTERLOCKED_BTS_RELAXED) && defined(BOOST_ATOMIC_INTERLOCKED_BTS_ACQUIRE) && defined(BOOST_ATOMIC_INTERLOCKED_BTS_RELEASE)
        bool result;
        switch (order)
        {
        case memory_order_relaxed:
            result = !!BOOST_ATOMIC_INTERLOCKED_BTS_RELAXED(&storage, bit_number);
            break;
        case memory_order_consume:
        case memory_order_acquire:
            result = !!BOOST_ATOMIC_INTERLOCKED_BTS_ACQUIRE(&storage, bit_number);
            break;
        case memory_order_release:
            result = !!BOOST_ATOMIC_INTERLOCKED_BTS_RELEASE(&storage, bit_number);
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            // Full-barrier variant covers acq_rel/seq_cst (and any unexpected value)
            result = !!BOOST_ATOMIC_INTERLOCKED_BTS(&storage, bit_number);
            break;
        }
        return result;
#else
        // Ordering-specific intrinsics unavailable; the full-barrier intrinsic
        // is at least as strong as any requested order.
        return !!BOOST_ATOMIC_INTERLOCKED_BTS(&storage, bit_number);
#endif
    }

    //! Atomically clears the bit at \c bit_number; returns the bit's previous value.
    static BOOST_FORCEINLINE bool bit_test_and_reset(storage_type volatile& storage, unsigned int bit_number, memory_order order) BOOST_NOEXCEPT
    {
#if defined(BOOST_ATOMIC_INTERLOCKED_BTR_RELAXED) && defined(BOOST_ATOMIC_INTERLOCKED_BTR_ACQUIRE) && defined(BOOST_ATOMIC_INTERLOCKED_BTR_RELEASE)
        bool result;
        switch (order)
        {
        case memory_order_relaxed:
            result = !!BOOST_ATOMIC_INTERLOCKED_BTR_RELAXED(&storage, bit_number);
            break;
        case memory_order_consume:
        case memory_order_acquire:
            result = !!BOOST_ATOMIC_INTERLOCKED_BTR_ACQUIRE(&storage, bit_number);
            break;
        case memory_order_release:
            result = !!BOOST_ATOMIC_INTERLOCKED_BTR_RELEASE(&storage, bit_number);
            break;
        case memory_order_acq_rel:
        case memory_order_seq_cst:
        default:
            // Full-barrier variant covers acq_rel/seq_cst (and any unexpected value)
            result = !!BOOST_ATOMIC_INTERLOCKED_BTR(&storage, bit_number);
            break;
        }
        return result;
#else
        // Ordering-specific intrinsics unavailable; the full-barrier intrinsic
        // is at least as strong as any requested order.
        return !!BOOST_ATOMIC_INTERLOCKED_BTR(&storage, bit_number);
#endif
    }
};
#endif // defined(BOOST_ATOMIC_INTERLOCKED_BTS) && defined(BOOST_ATOMIC_INTERLOCKED_BTR)
} // namespace detail
} // namespace atomics
} // namespace boost
#endif // BOOST_ATOMIC_DETAIL_EXTRA_OPS_MSVC_ARM_HPP_INCLUDED_

View File

@@ -0,0 +1,764 @@
/*
* Distributed under the Boost Software License, Version 1.0.
* (See accompanying file LICENSE_1_0.txt or copy at
* http://www.boost.org/LICENSE_1_0.txt)
*
* Copyright (c) 2017 Andrey Semashev
*/
/*!
* \file atomic/detail/extra_ops_msvc_x86.hpp
*
* This header contains implementation of the extra atomic operations for x86.
*/
#ifndef BOOST_ATOMIC_DETAIL_EXTRA_OPS_MSVC_X86_HPP_INCLUDED_
#define BOOST_ATOMIC_DETAIL_EXTRA_OPS_MSVC_X86_HPP_INCLUDED_
#include <cstddef>
#include <boost/memory_order.hpp>
#include <boost/atomic/detail/config.hpp>
#include <boost/atomic/detail/interlocked.hpp>
#include <boost/atomic/detail/storage_type.hpp>
#include <boost/atomic/detail/extra_operations_fwd.hpp>
#include <boost/atomic/detail/extra_ops_generic.hpp>
#include <boost/atomic/capabilities.hpp>
#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif
#if defined(BOOST_MSVC)
#pragma warning(push)
// frame pointer register 'ebx' modified by inline assembly code
#pragma warning(disable: 4731)
#endif
namespace boost {
namespace atomics {
namespace detail {
#if defined(_M_IX86) || (defined(BOOST_ATOMIC_INTERLOCKED_BTS) && defined(BOOST_ATOMIC_INTERLOCKED_BTR))
// Operations shared by all storage sizes: atomic bit test-and-set/reset/complement.
// Where the MSVC interlocked intrinsics exist they are used directly; otherwise
// (only reachable on 32-bit x86, per the enclosing #if) inline assembler with a
// locked bts/btr/btc instruction is emitted.
template< typename Base, std::size_t Size, bool Signed >
struct msvc_x86_extra_operations_common :
public generic_extra_operations< Base, Size, Signed >
{
typedef generic_extra_operations< Base, Size, Signed > base_type;
typedef typename base_type::storage_type storage_type;
#if defined(BOOST_ATOMIC_INTERLOCKED_BTS)
// Intrinsic path: the interlocked intrinsic is a full barrier, so the
// memory_order argument is intentionally ignored (parameter left unnamed).
static BOOST_FORCEINLINE bool bit_test_and_set(storage_type volatile& storage, unsigned int bit_number, memory_order) BOOST_NOEXCEPT
{
return !!BOOST_ATOMIC_INTERLOCKED_BTS(&storage, bit_number);
}
#else
// Inline-asm fallback. edx = address of storage, eax = bit index;
// setc captures CF, which `bts` sets to the bit's previous value.
static BOOST_FORCEINLINE bool bit_test_and_set(storage_type volatile& storage, unsigned int bit_number, memory_order order) BOOST_NOEXCEPT
{
base_type::fence_before(order);
bool result;
__asm
{
mov edx, storage
mov eax, bit_number
lock bts [edx], eax
setc result
};
base_type::fence_after(order);
return result;
}
#endif
#if defined(BOOST_ATOMIC_INTERLOCKED_BTR)
// Intrinsic path, full barrier; memory_order ignored (see bit_test_and_set).
static BOOST_FORCEINLINE bool bit_test_and_reset(storage_type volatile& storage, unsigned int bit_number, memory_order) BOOST_NOEXCEPT
{
return !!BOOST_ATOMIC_INTERLOCKED_BTR(&storage, bit_number);
}
#else
// Inline-asm fallback; `btr` clears the bit, CF holds its previous value.
static BOOST_FORCEINLINE bool bit_test_and_reset(storage_type volatile& storage, unsigned int bit_number, memory_order order) BOOST_NOEXCEPT
{
base_type::fence_before(order);
bool result;
__asm
{
mov edx, storage
mov eax, bit_number
lock btr [edx], eax
setc result
};
base_type::fence_after(order);
return result;
}
#endif
#if defined(_M_IX86)
// No interlocked intrinsic exists for bit complement; asm-only, 32-bit x86 only.
// `btc` toggles the bit, CF holds its previous value.
static BOOST_FORCEINLINE bool bit_test_and_complement(storage_type volatile& storage, unsigned int bit_number, memory_order order) BOOST_NOEXCEPT
{
base_type::fence_before(order);
bool result;
__asm
{
mov edx, storage
mov eax, bit_number
lock btc [edx], eax
setc result
};
base_type::fence_after(order);
return result;
}
#endif
};
//! Extra operations for 8-bit storage. Inherits the bit test operations from
//! msvc_x86_extra_operations_common; on 32-bit x86 (_M_IX86) the remaining
//! operations are implemented with inline assembler on byte operands.
template< typename Base, bool Signed >
struct extra_operations< Base, 1u, Signed > :
public msvc_x86_extra_operations_common< Base, 1u, Signed >
{
typedef msvc_x86_extra_operations_common< Base, 1u, Signed > base_type;
typedef typename base_type::storage_type storage_type;
#if defined(_M_IX86)
// Atomically replaces the value with its arithmetic negation; returns the old value.
// cmpxchg retry loop: al holds the expected old value, dl the negated candidate;
// on failure cmpxchg reloads al with the current value and we retry.
static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
{
base_type::fence_before(order);
storage_type old_val;
__asm
{
mov ecx, storage
movzx eax, byte ptr [ecx]
align 16
again:
mov edx, eax
neg dl
lock cmpxchg byte ptr [ecx], dl
jne again
mov old_val, al
};
base_type::fence_after(order);
return old_val;
}
// Atomically replaces the value with its bitwise complement; returns the old value.
// Same cmpxchg retry loop as fetch_negate, with `not` instead of `neg`.
static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
{
base_type::fence_before(order);
storage_type old_val;
__asm
{
mov ecx, storage
movzx eax, byte ptr [ecx]
align 16
again:
mov edx, eax
not dl
lock cmpxchg byte ptr [ecx], dl
jne again
mov old_val, al
};
base_type::fence_after(order);
return old_val;
}
// opaque_* operations: single locked read-modify-write instruction, no result
// returned. edx = address of storage, al = operand value where applicable.
static BOOST_FORCEINLINE void opaque_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
base_type::fence_before(order);
__asm
{
mov edx, storage
movzx eax, v
lock add byte ptr [edx], al
};
base_type::fence_after(order);
}
static BOOST_FORCEINLINE void opaque_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
base_type::fence_before(order);
__asm
{
mov edx, storage
movzx eax, v
lock sub byte ptr [edx], al
};
base_type::fence_after(order);
}
static BOOST_FORCEINLINE void opaque_negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
{
base_type::fence_before(order);
__asm
{
mov edx, storage
lock neg byte ptr [edx]
};
base_type::fence_after(order);
}
static BOOST_FORCEINLINE void opaque_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
base_type::fence_before(order);
__asm
{
mov edx, storage
movzx eax, v
lock and byte ptr [edx], al
};
base_type::fence_after(order);
}
static BOOST_FORCEINLINE void opaque_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
base_type::fence_before(order);
__asm
{
mov edx, storage
movzx eax, v
lock or byte ptr [edx], al
};
base_type::fence_after(order);
}
static BOOST_FORCEINLINE void opaque_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
base_type::fence_before(order);
__asm
{
mov edx, storage
movzx eax, v
lock xor byte ptr [edx], al
};
base_type::fence_after(order);
}
static BOOST_FORCEINLINE void opaque_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
{
base_type::fence_before(order);
__asm
{
mov edx, storage
lock not byte ptr [edx]
};
base_type::fence_after(order);
}
// *_and_test operations: same locked instruction as the opaque_* variants, plus
// `setz result` -- i.e. they return true iff the resulting value is zero.
static BOOST_FORCEINLINE bool add_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
base_type::fence_before(order);
bool result;
__asm
{
mov edx, storage
movzx eax, v
lock add byte ptr [edx], al
setz result
};
base_type::fence_after(order);
return result;
}
static BOOST_FORCEINLINE bool sub_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
base_type::fence_before(order);
bool result;
__asm
{
mov edx, storage
movzx eax, v
lock sub byte ptr [edx], al
setz result
};
base_type::fence_after(order);
return result;
}
static BOOST_FORCEINLINE bool and_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
base_type::fence_before(order);
bool result;
__asm
{
mov edx, storage
movzx eax, v
lock and byte ptr [edx], al
setz result
};
base_type::fence_after(order);
return result;
}
static BOOST_FORCEINLINE bool or_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
base_type::fence_before(order);
bool result;
__asm
{
mov edx, storage
movzx eax, v
lock or byte ptr [edx], al
setz result
};
base_type::fence_after(order);
return result;
}
static BOOST_FORCEINLINE bool xor_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
base_type::fence_before(order);
bool result;
__asm
{
mov edx, storage
movzx eax, v
lock xor byte ptr [edx], al
setz result
};
base_type::fence_after(order);
return result;
}
#endif // defined(_M_IX86)
};
//! Extra operations for 16-bit storage. Structurally identical to the 8-bit
//! specialization above, operating on word-sized operands (ax/dx registers).
template< typename Base, bool Signed >
struct extra_operations< Base, 2u, Signed > :
public msvc_x86_extra_operations_common< Base, 2u, Signed >
{
typedef msvc_x86_extra_operations_common< Base, 2u, Signed > base_type;
typedef typename base_type::storage_type storage_type;
#if defined(_M_IX86)
// Atomic negate via cmpxchg retry loop; returns the previous value (in ax).
static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
{
base_type::fence_before(order);
storage_type old_val;
__asm
{
mov ecx, storage
movzx eax, word ptr [ecx]
align 16
again:
mov edx, eax
neg dx
lock cmpxchg word ptr [ecx], dx
jne again
mov old_val, ax
};
base_type::fence_after(order);
return old_val;
}
// Atomic bitwise complement via cmpxchg retry loop; returns the previous value.
static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
{
base_type::fence_before(order);
storage_type old_val;
__asm
{
mov ecx, storage
movzx eax, word ptr [ecx]
align 16
again:
mov edx, eax
not dx
lock cmpxchg word ptr [ecx], dx
jne again
mov old_val, ax
};
base_type::fence_after(order);
return old_val;
}
// opaque_* operations: single locked read-modify-write, no result returned.
static BOOST_FORCEINLINE void opaque_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
base_type::fence_before(order);
__asm
{
mov edx, storage
movzx eax, v
lock add word ptr [edx], ax
};
base_type::fence_after(order);
}
static BOOST_FORCEINLINE void opaque_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
base_type::fence_before(order);
__asm
{
mov edx, storage
movzx eax, v
lock sub word ptr [edx], ax
};
base_type::fence_after(order);
}
static BOOST_FORCEINLINE void opaque_negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
{
base_type::fence_before(order);
__asm
{
mov edx, storage
lock neg word ptr [edx]
};
base_type::fence_after(order);
}
static BOOST_FORCEINLINE void opaque_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
base_type::fence_before(order);
__asm
{
mov edx, storage
movzx eax, v
lock and word ptr [edx], ax
};
base_type::fence_after(order);
}
static BOOST_FORCEINLINE void opaque_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
base_type::fence_before(order);
__asm
{
mov edx, storage
movzx eax, v
lock or word ptr [edx], ax
};
base_type::fence_after(order);
}
static BOOST_FORCEINLINE void opaque_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
base_type::fence_before(order);
__asm
{
mov edx, storage
movzx eax, v
lock xor word ptr [edx], ax
};
base_type::fence_after(order);
}
static BOOST_FORCEINLINE void opaque_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
{
base_type::fence_before(order);
__asm
{
mov edx, storage
lock not word ptr [edx]
};
base_type::fence_after(order);
}
// *_and_test operations: return true iff the resulting value is zero (setz).
static BOOST_FORCEINLINE bool add_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
base_type::fence_before(order);
bool result;
__asm
{
mov edx, storage
movzx eax, v
lock add word ptr [edx], ax
setz result
};
base_type::fence_after(order);
return result;
}
static BOOST_FORCEINLINE bool sub_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
base_type::fence_before(order);
bool result;
__asm
{
mov edx, storage
movzx eax, v
lock sub word ptr [edx], ax
setz result
};
base_type::fence_after(order);
return result;
}
static BOOST_FORCEINLINE bool and_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
base_type::fence_before(order);
bool result;
__asm
{
mov edx, storage
movzx eax, v
lock and word ptr [edx], ax
setz result
};
base_type::fence_after(order);
return result;
}
static BOOST_FORCEINLINE bool or_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
base_type::fence_before(order);
bool result;
__asm
{
mov edx, storage
movzx eax, v
lock or word ptr [edx], ax
setz result
};
base_type::fence_after(order);
return result;
}
static BOOST_FORCEINLINE bool xor_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
base_type::fence_before(order);
bool result;
__asm
{
mov edx, storage
movzx eax, v
lock xor word ptr [edx], ax
setz result
};
base_type::fence_after(order);
return result;
}
#endif // defined(_M_IX86)
};
//! Extra operations for 32-bit storage. Structurally identical to the 8- and
//! 16-bit specializations above, operating on dword operands (eax/edx).
template< typename Base, bool Signed >
struct extra_operations< Base, 4u, Signed > :
public msvc_x86_extra_operations_common< Base, 4u, Signed >
{
typedef msvc_x86_extra_operations_common< Base, 4u, Signed > base_type;
typedef typename base_type::storage_type storage_type;
#if defined(_M_IX86)
// Atomic negate via cmpxchg retry loop; returns the previous value (in eax).
static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
{
base_type::fence_before(order);
storage_type old_val;
__asm
{
mov ecx, storage
mov eax, dword ptr [ecx]
align 16
again:
mov edx, eax
neg edx
lock cmpxchg dword ptr [ecx], edx
jne again
mov old_val, eax
};
base_type::fence_after(order);
return old_val;
}
// Atomic bitwise complement via cmpxchg retry loop; returns the previous value.
static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
{
base_type::fence_before(order);
storage_type old_val;
__asm
{
mov ecx, storage
mov eax, dword ptr [ecx]
align 16
again:
mov edx, eax
not edx
lock cmpxchg dword ptr [ecx], edx
jne again
mov old_val, eax
};
base_type::fence_after(order);
return old_val;
}
// opaque_* operations: single locked read-modify-write, no result returned.
static BOOST_FORCEINLINE void opaque_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
base_type::fence_before(order);
__asm
{
mov edx, storage
mov eax, v
lock add dword ptr [edx], eax
};
base_type::fence_after(order);
}
static BOOST_FORCEINLINE void opaque_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
base_type::fence_before(order);
__asm
{
mov edx, storage
mov eax, v
lock sub dword ptr [edx], eax
};
base_type::fence_after(order);
}
static BOOST_FORCEINLINE void opaque_negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
{
base_type::fence_before(order);
__asm
{
mov edx, storage
lock neg dword ptr [edx]
};
base_type::fence_after(order);
}
static BOOST_FORCEINLINE void opaque_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
base_type::fence_before(order);
__asm
{
mov edx, storage
mov eax, v
lock and dword ptr [edx], eax
};
base_type::fence_after(order);
}
static BOOST_FORCEINLINE void opaque_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
base_type::fence_before(order);
__asm
{
mov edx, storage
mov eax, v
lock or dword ptr [edx], eax
};
base_type::fence_after(order);
}
static BOOST_FORCEINLINE void opaque_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
base_type::fence_before(order);
__asm
{
mov edx, storage
mov eax, v
lock xor dword ptr [edx], eax
};
base_type::fence_after(order);
}
static BOOST_FORCEINLINE void opaque_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
{
base_type::fence_before(order);
__asm
{
mov edx, storage
lock not dword ptr [edx]
};
base_type::fence_after(order);
}
// *_and_test operations: return true iff the resulting value is zero (setz).
static BOOST_FORCEINLINE bool add_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
base_type::fence_before(order);
bool result;
__asm
{
mov edx, storage
mov eax, v
lock add dword ptr [edx], eax
setz result
};
base_type::fence_after(order);
return result;
}
static BOOST_FORCEINLINE bool sub_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
base_type::fence_before(order);
bool result;
__asm
{
mov edx, storage
mov eax, v
lock sub dword ptr [edx], eax
setz result
};
base_type::fence_after(order);
return result;
}
static BOOST_FORCEINLINE bool and_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
base_type::fence_before(order);
bool result;
__asm
{
mov edx, storage
mov eax, v
lock and dword ptr [edx], eax
setz result
};
base_type::fence_after(order);
return result;
}
static BOOST_FORCEINLINE bool or_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
base_type::fence_before(order);
bool result;
__asm
{
mov edx, storage
mov eax, v
lock or dword ptr [edx], eax
setz result
};
base_type::fence_after(order);
return result;
}
static BOOST_FORCEINLINE bool xor_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
{
base_type::fence_before(order);
bool result;
__asm
{
mov edx, storage
mov eax, v
lock xor dword ptr [edx], eax
setz result
};
base_type::fence_after(order);
return result;
}
#endif // defined(_M_IX86)
};
#endif // defined(_M_IX86) || (defined(BOOST_ATOMIC_INTERLOCKED_BTS) && defined(BOOST_ATOMIC_INTERLOCKED_BTR))
#if defined(BOOST_ATOMIC_INTERLOCKED_BTS64) && defined(BOOST_ATOMIC_INTERLOCKED_BTR64)
//! Extra operations for 64-bit storage, based on the 64-bit MSVC interlocked
//! bit test intrinsics (available on x64 only, per BOOST_ATOMIC_INTERLOCKED_BTS64/BTR64).
//! The intrinsics imply a full memory barrier, so the memory_order argument is
//! intentionally ignored; the parameter is left unnamed to avoid unused-parameter
//! warnings (C4100) and to match the style of the interlocked-based overloads in
//! msvc_x86_extra_operations_common.
template< typename Base, bool Signed >
struct extra_operations< Base, 8u, Signed > :
    public generic_extra_operations< Base, 8u, Signed >
{
    typedef generic_extra_operations< Base, 8u, Signed > base_type;
    typedef typename base_type::storage_type storage_type;

    //! Atomically sets the bit at \c bit_number; returns the bit's previous value.
    static BOOST_FORCEINLINE bool bit_test_and_set(storage_type volatile& storage, unsigned int bit_number, memory_order) BOOST_NOEXCEPT
    {
        return !!BOOST_ATOMIC_INTERLOCKED_BTS64(&storage, bit_number);
    }

    //! Atomically clears the bit at \c bit_number; returns the bit's previous value.
    static BOOST_FORCEINLINE bool bit_test_and_reset(storage_type volatile& storage, unsigned int bit_number, memory_order) BOOST_NOEXCEPT
    {
        return !!BOOST_ATOMIC_INTERLOCKED_BTR64(&storage, bit_number);
    }
};
#endif // defined(BOOST_ATOMIC_INTERLOCKED_BTS64) && defined(BOOST_ATOMIC_INTERLOCKED_BTR64)
} // namespace detail
} // namespace atomics
} // namespace boost
#if defined(BOOST_MSVC)
#pragma warning(pop)
#endif
#endif // BOOST_ATOMIC_DETAIL_EXTRA_OPS_MSVC_X86_HPP_INCLUDED_

View File

@@ -2,7 +2,7 @@
#define BOOST_ATOMIC_DETAIL_INTERLOCKED_HPP
// Copyright (c) 2009 Helge Bahmann
// Copyright (c) 2012 - 2014 Andrey Semashev
// Copyright (c) 2012 - 2014, 2017 Andrey Semashev
//
// Distributed under the Boost Software License, Version 1.0.
// See accompanying file LICENSE_1_0.txt or copy at
@@ -73,6 +73,8 @@ extern "C" long __cdecl _InterlockedExchange( long volatile *, long );
#pragma intrinsic(_InterlockedAnd)
#pragma intrinsic(_InterlockedOr)
#pragma intrinsic(_InterlockedXor)
#pragma intrinsic(_interlockedbittestandset)
#pragma intrinsic(_interlockedbittestandreset)
#endif
#define BOOST_ATOMIC_INTERLOCKED_COMPARE_EXCHANGE(dest, exchange, compare) _InterlockedCompareExchange((long*)(dest), (long)(exchange), (long)(compare))
@@ -81,6 +83,18 @@ extern "C" long __cdecl _InterlockedExchange( long volatile *, long );
#define BOOST_ATOMIC_INTERLOCKED_AND(dest, arg) _InterlockedAnd((long*)(dest), (long)(arg))
#define BOOST_ATOMIC_INTERLOCKED_OR(dest, arg) _InterlockedOr((long*)(dest), (long)(arg))
#define BOOST_ATOMIC_INTERLOCKED_XOR(dest, arg) _InterlockedXor((long*)(dest), (long)(arg))
// Atomic bit test-and-set/test-and-reset on a 32-bit operand; the intrinsics
// return the bit's previous value. These are full-barrier operations.
#define BOOST_ATOMIC_INTERLOCKED_BTS(dest, arg) _interlockedbittestandset((long*)(dest), (long)(arg))
#define BOOST_ATOMIC_INTERLOCKED_BTR(dest, arg) _interlockedbittestandreset((long*)(dest), (long)(arg))
// 64-bit counterparts are only available on x64
#if defined(_M_AMD64)
#if defined(BOOST_MSVC)
#pragma intrinsic(_interlockedbittestandset64)
#pragma intrinsic(_interlockedbittestandreset64)
#endif
#define BOOST_ATOMIC_INTERLOCKED_BTS64(dest, arg) _interlockedbittestandset64((__int64*)(dest), (__int64)(arg))
#define BOOST_ATOMIC_INTERLOCKED_BTR64(dest, arg) _interlockedbittestandreset64((__int64*)(dest), (__int64)(arg))
#endif // defined(_M_AMD64)
#if (defined(_M_IX86) && _M_IX86 >= 500) || defined(_M_AMD64) || defined(_M_IA64)
#if defined(BOOST_MSVC)
@@ -394,6 +408,26 @@ extern "C" long __cdecl _InterlockedExchange( long volatile *, long );
#define BOOST_ATOMIC_INTERLOCKED_XOR64_ACQUIRE(dest, arg) _InterlockedXor64_acq((__int64*)(dest), (__int64)(arg))
#define BOOST_ATOMIC_INTERLOCKED_XOR64_RELEASE(dest, arg) _InterlockedXor64_rel((__int64*)(dest), (__int64)(arg))
// Ordering-specific variants of the bit test intrinsics:
// _nf = no fence (relaxed), _acq = acquire, _rel = release.
// (This section appears under the ARM-specific part of the file -- see the
// enclosing #endif comment below; confirm against the full file.)
#if defined(BOOST_MSVC)
#pragma intrinsic(_interlockedbittestandset_nf)
#pragma intrinsic(_interlockedbittestandset_acq)
#pragma intrinsic(_interlockedbittestandset_rel)
#endif
#define BOOST_ATOMIC_INTERLOCKED_BTS_RELAXED(dest, arg) _interlockedbittestandset_nf((long*)(dest), (long)(arg))
#define BOOST_ATOMIC_INTERLOCKED_BTS_ACQUIRE(dest, arg) _interlockedbittestandset_acq((long*)(dest), (long)(arg))
#define BOOST_ATOMIC_INTERLOCKED_BTS_RELEASE(dest, arg) _interlockedbittestandset_rel((long*)(dest), (long)(arg))
#if defined(BOOST_MSVC)
#pragma intrinsic(_interlockedbittestandreset_nf)
#pragma intrinsic(_interlockedbittestandreset_acq)
#pragma intrinsic(_interlockedbittestandreset_rel)
#endif
#define BOOST_ATOMIC_INTERLOCKED_BTR_RELAXED(dest, arg) _interlockedbittestandreset_nf((long*)(dest), (long)(arg))
#define BOOST_ATOMIC_INTERLOCKED_BTR_ACQUIRE(dest, arg) _interlockedbittestandreset_acq((long*)(dest), (long)(arg))
#define BOOST_ATOMIC_INTERLOCKED_BTR_RELEASE(dest, arg) _interlockedbittestandreset_rel((long*)(dest), (long)(arg))
#endif // _MSC_VER >= 1700 && defined(_M_ARM)
#endif // _MSC_VER < 1400