Mirror of https://github.com/boostorg/atomic.git — synced 2026-02-02 08:22:08 +00:00

Added generic implementation of extended ops. Reorganized platform macros.

This commit is contained in:
Andrey Semashev
2015-09-13 21:25:58 +03:00
parent 65d0d557f1
commit 6d7c0ec2ee
4 changed files with 192 additions and 40 deletions

View File

@@ -19,7 +19,7 @@
#include <boost/atomic/detail/int_sizes.hpp>
#if !defined(BOOST_ATOMIC_EMULATED)
#include BOOST_ATOMIC_DETAIL_HEADER(boost/atomic/detail/caps_)
#include BOOST_ATOMIC_DETAIL_BACKEND_HEADER(boost/atomic/detail/caps_)
#endif
#ifdef BOOST_HAS_PRAGMA_ONCE

View File

@@ -0,0 +1,142 @@
/*
* Distributed under the Boost Software License, Version 1.0.
* (See accompanying file LICENSE_1_0.txt or copy at
* http://www.boost.org/LICENSE_1_0.txt)
*
* Copyright (c) 2015 Andrey Semashev
*/
/*!
* \file atomic/detail/ext_ops_generic.hpp
*
* This header contains generic implementation of the extended atomic operations.
*/
#ifndef BOOST_ATOMIC_DETAIL_EXT_OPS_GENERIC_HPP_INCLUDED_
#define BOOST_ATOMIC_DETAIL_EXT_OPS_GENERIC_HPP_INCLUDED_
#include <boost/memory_order.hpp>
#include <boost/atomic/detail/config.hpp>
#include <boost/atomic/detail/storage_type.hpp>
#include <boost/atomic/capabilities.hpp>
#ifdef BOOST_HAS_PRAGMA_ONCE
#pragma once
#endif
namespace boost {
namespace atomics {
namespace detail {
//! Generic implementation of the extended atomic operations (negate, complement,
//! the "opaque" fire-and-forget variants and the "-and-test" variants), expressed
//! purely in terms of the basic operations supplied by \c Base
//! (fetch_add/sub/and/or/xor and compare_exchange_weak).
//!
//! \tparam Base   core operations backend; must provide \c storage_type and the basic fetch ops
//! \tparam Size   size of the operand, in bytes
//! \tparam Signed whether the operand is treated as a signed integer
template< typename Base, std::size_t Size, bool Signed >
struct generic_extended_operations :
    public Base
{
    typedef Base base_type;
    typedef typename base_type::storage_type storage_type;
    // Integer type matching the operand's actual size and signedness. Arithmetic is
    // performed in this type and the result cast back to storage_type, since
    // storage_type may be wider than the operand.
    typedef typename make_storage_type< Size, Signed >::type emulated_storage_type;
    // Unsigned counterpart; used to build bit masks so that shifting a 1 into the
    // highest (sign) bit position stays well-defined.
    typedef typename make_storage_type< Size, false >::type unsigned_emulated_storage_type;

    //! Atomically replaces the value with its arithmetic negation; returns the previous value.
    static BOOST_FORCEINLINE storage_type fetch_negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        storage_type old_val;
        // The initial read is only a seed for the CAS loop; the CAS validates it.
        // NOTE(review): non_atomic_load is not declared by any include visible in this
        // header — presumably brought in transitively; verify.
        atomics::detail::non_atomic_load(storage, old_val);
        // Standard CAS retry loop: on failure old_val is expected to be refreshed with
        // the current value, so the negated value is recomputed each iteration. The
        // failure order is relaxed because a failed attempt publishes nothing.
        while (!Base::compare_exchange_weak(storage, old_val, static_cast< storage_type >(-static_cast< emulated_storage_type >(old_val)), order, memory_order_relaxed)) {}
        return old_val;
    }

    //! Atomically replaces the value with its bitwise complement; returns the previous value.
    //! Implemented as XOR with an all-ones mask of the operand's width.
    static BOOST_FORCEINLINE storage_type fetch_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        return Base::fetch_xor(storage, static_cast< storage_type >(~static_cast< emulated_storage_type >(0)), order);
    }

    //! Atomically adds \c v to the value; the previous value is discarded.
    static BOOST_FORCEINLINE void opaque_add(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        Base::fetch_add(storage, v, order);
    }

    //! Atomically subtracts \c v from the value; the previous value is discarded.
    static BOOST_FORCEINLINE void opaque_sub(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        Base::fetch_sub(storage, v, order);
    }

    //! Atomically negates the value; the previous value is discarded.
    static BOOST_FORCEINLINE void opaque_negate(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        fetch_negate(storage, order);
    }

    //! Atomically ANDs \c v into the value; the previous value is discarded.
    static BOOST_FORCEINLINE void opaque_and(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        Base::fetch_and(storage, v, order);
    }

    //! Atomically ORs \c v into the value; the previous value is discarded.
    static BOOST_FORCEINLINE void opaque_or(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        Base::fetch_or(storage, v, order);
    }

    //! Atomically XORs \c v into the value; the previous value is discarded.
    static BOOST_FORCEINLINE void opaque_xor(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        Base::fetch_xor(storage, v, order);
    }

    //! Atomically complements the value; the previous value is discarded.
    static BOOST_FORCEINLINE void opaque_complement(storage_type volatile& storage, memory_order order) BOOST_NOEXCEPT
    {
        fetch_complement(storage, order);
    }

    //! Atomically adds \c v to the value; returns \c true if the resulting value is zero.
    static BOOST_FORCEINLINE bool add_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type old_val = Base::fetch_add(storage, v, order);
        // Recompute the new value locally from the returned old value, in the
        // operand-width type so wrap-around matches the operand's width.
        emulated_storage_type new_val = static_cast< emulated_storage_type >(old_val) + static_cast< emulated_storage_type >(v);
        return !new_val;
    }

    //! Atomically subtracts \c v from the value; returns \c true if the resulting value is zero.
    static BOOST_FORCEINLINE bool sub_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        storage_type old_val = Base::fetch_sub(storage, v, order);
        emulated_storage_type new_val = static_cast< emulated_storage_type >(old_val) - static_cast< emulated_storage_type >(v);
        return !new_val;
    }

    //! Atomically ANDs \c v into the value; returns \c true if the resulting value is zero.
    //! (old & v) equals the new value, so it is recomputed from the returned old value.
    static BOOST_FORCEINLINE bool and_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        return !(Base::fetch_and(storage, v, order) & v);
    }

    //! Atomically ORs \c v into the value; returns \c true if the resulting value is zero.
    //! (old | v) equals the new value.
    static BOOST_FORCEINLINE bool or_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        return !(Base::fetch_or(storage, v, order) | v);
    }

    //! Atomically XORs \c v into the value; returns \c true if the resulting value is zero.
    //! (old ^ v) equals the new value.
    static BOOST_FORCEINLINE bool xor_and_test(storage_type volatile& storage, storage_type v, memory_order order) BOOST_NOEXCEPT
    {
        return !(Base::fetch_xor(storage, v, order) ^ v);
    }

    //! Atomically sets bit \c bit_number; returns \c true if the bit was set before.
    static BOOST_FORCEINLINE bool bit_test_and_set(storage_type volatile& storage, unsigned int bit_number, memory_order order) BOOST_NOEXCEPT
    {
        // Shift in the unsigned operand-width type so targeting the sign bit is well-defined.
        storage_type mask = storage_type(((unsigned_emulated_storage_type)1u) << bit_number);
        storage_type old_val = Base::fetch_or(storage, mask, order);
        return !!(old_val & mask);
    }

    //! Atomically clears bit \c bit_number; returns \c true if the bit was set before.
    static BOOST_FORCEINLINE bool bit_test_and_reset(storage_type volatile& storage, unsigned int bit_number, memory_order order) BOOST_NOEXCEPT
    {
        storage_type mask = storage_type(((unsigned_emulated_storage_type)1u) << bit_number);
        storage_type old_val = Base::fetch_and(storage, ~mask, order);
        return !!(old_val & mask);
    }

    //! Atomically flips bit \c bit_number; returns \c true if the bit was set before.
    static BOOST_FORCEINLINE bool bit_test_and_complement(storage_type volatile& storage, unsigned int bit_number, memory_order order) BOOST_NOEXCEPT
    {
        storage_type mask = storage_type(((unsigned_emulated_storage_type)1u) << bit_number);
        storage_type old_val = Base::fetch_xor(storage, mask, order);
        return !!(old_val & mask);
    }
};
} // namespace detail
} // namespace atomics
} // namespace boost
#endif // BOOST_ATOMIC_DETAIL_EXT_OPS_GENERIC_HPP_INCLUDED_

View File

@@ -18,7 +18,7 @@
#include <boost/atomic/detail/platform.hpp>
#if !defined(BOOST_ATOMIC_EMULATED)
#include BOOST_ATOMIC_DETAIL_HEADER(boost/atomic/detail/ops_)
#include BOOST_ATOMIC_DETAIL_BACKEND_HEADER(boost/atomic/detail/ops_)
#else
#include <boost/atomic/detail/operations_fwd.hpp>
#endif

View File

@@ -23,27 +23,11 @@
#if !defined(BOOST_ATOMIC_FORCE_FALLBACK)
// Compiler-based backends
#if (defined(__ibmxl__) || defined(__IBMCPP__)) && defined(__PPC__)
// Determine the target platform.
// The target platform describes the compiler and target architecture. It can be used by more generic backends, such as the ones
// based on compiler intrinsics, to implement specialized operations in a non-generic way.
// IBM XL C++ Compiler has to be checked before GCC/Clang as it pretends to be one but does not support __atomic* intrinsics.
// It does support GCC inline assembler though.
#define BOOST_ATOMIC_DETAIL_PLATFORM gcc_ppc
#elif ((defined(__GNUC__) && ((__GNUC__ * 100 + __GNUC_MINOR__) >= 407)) ||\
(defined(BOOST_CLANG) && ((__clang_major__ * 100 + __clang_minor__) >= 302))) &&\
(\
(__GCC_ATOMIC_BOOL_LOCK_FREE + 0) == 2 ||\
(__GCC_ATOMIC_CHAR_LOCK_FREE + 0) == 2 ||\
(__GCC_ATOMIC_SHORT_LOCK_FREE + 0) == 2 ||\
(__GCC_ATOMIC_INT_LOCK_FREE + 0) == 2 ||\
(__GCC_ATOMIC_LONG_LOCK_FREE + 0) == 2 ||\
(__GCC_ATOMIC_LLONG_LOCK_FREE + 0) == 2\
)
#define BOOST_ATOMIC_DETAIL_PLATFORM gcc_atomic
#elif (defined(__GNUC__) || defined(__SUNPRO_CC)) && (defined(__i386__) || defined(__x86_64__))
#if defined(__GNUC__) && (defined(__i386__) || defined(__x86_64__))
#define BOOST_ATOMIC_DETAIL_PLATFORM gcc_x86
@@ -74,17 +58,6 @@
#define BOOST_ATOMIC_DETAIL_PLATFORM gcc_alpha
#elif defined(__GNUC__) && ((__GNUC__ * 100 + __GNUC_MINOR__) >= 401) &&\
(\
defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_1) ||\
defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_2) ||\
defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4) ||\
defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_8) ||\
defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_16)\
)
#define BOOST_ATOMIC_DETAIL_PLATFORM gcc_sync
#elif defined(_MSC_VER) && (defined(_M_IX86) || defined(_M_X64))
#define BOOST_ATOMIC_DETAIL_PLATFORM msvc_x86
@@ -95,28 +68,65 @@
#endif
// Compiler-based backends
// IBM XL C++ Compiler has to be checked before GCC/Clang as it pretends to be one but does not support __atomic* intrinsics.
// It does support GCC inline assembler though.
#if !(defined(__ibmxl__) || defined(__IBMCPP__)) &&\
((defined(__GNUC__) && ((__GNUC__ * 100 + __GNUC_MINOR__) >= 407)) ||\
(defined(BOOST_CLANG) && ((__clang_major__ * 100 + __clang_minor__) >= 302))) &&\
(\
(__GCC_ATOMIC_BOOL_LOCK_FREE + 0) == 2 ||\
(__GCC_ATOMIC_CHAR_LOCK_FREE + 0) == 2 ||\
(__GCC_ATOMIC_SHORT_LOCK_FREE + 0) == 2 ||\
(__GCC_ATOMIC_INT_LOCK_FREE + 0) == 2 ||\
(__GCC_ATOMIC_LONG_LOCK_FREE + 0) == 2 ||\
(__GCC_ATOMIC_LLONG_LOCK_FREE + 0) == 2\
)
#define BOOST_ATOMIC_DETAIL_BACKEND gcc_atomic
#elif defined(BOOST_ATOMIC_DETAIL_PLATFORM)
#define BOOST_ATOMIC_DETAIL_BACKEND BOOST_ATOMIC_DETAIL_PLATFORM
#elif defined(__GNUC__) && ((__GNUC__ * 100 + __GNUC_MINOR__) >= 401) &&\
(\
defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_1) ||\
defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_2) ||\
defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_4) ||\
defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_8) ||\
defined(__GCC_HAVE_SYNC_COMPARE_AND_SWAP_16)\
)
#define BOOST_ATOMIC_DETAIL_BACKEND gcc_sync
#endif
// OS-based backends
#if !defined(BOOST_ATOMIC_DETAIL_PLATFORM)
#if !defined(BOOST_ATOMIC_DETAIL_BACKEND)
#if defined(__linux__) && defined(__arm__)
#define BOOST_ATOMIC_DETAIL_PLATFORM linux_arm
#define BOOST_ATOMIC_DETAIL_BACKEND linux_arm
#elif defined(BOOST_WINDOWS) || defined(_WIN32_CE)
#define BOOST_ATOMIC_DETAIL_PLATFORM windows
#define BOOST_ATOMIC_DETAIL_BACKEND windows
#endif
#endif // !defined(BOOST_ATOMIC_DETAIL_PLATFORM)
#endif // !defined(BOOST_ATOMIC_DETAIL_BACKEND)
#endif // !defined(BOOST_ATOMIC_FORCE_FALLBACK)
#if !defined(BOOST_ATOMIC_DETAIL_PLATFORM)
#define BOOST_ATOMIC_DETAIL_PLATFORM emulated
#if !defined(BOOST_ATOMIC_DETAIL_BACKEND)
#define BOOST_ATOMIC_DETAIL_BACKEND emulated
#define BOOST_ATOMIC_EMULATED
#endif
#define BOOST_ATOMIC_DETAIL_HEADER(prefix) <BOOST_JOIN(prefix, BOOST_ATOMIC_DETAIL_PLATFORM).hpp>
#define BOOST_ATOMIC_DETAIL_PLATFORM_HEADER(prefix) <BOOST_JOIN(prefix, BOOST_ATOMIC_DETAIL_PLATFORM).hpp>
#define BOOST_ATOMIC_DETAIL_BACKEND_HEADER(prefix) <BOOST_JOIN(prefix, BOOST_ATOMIC_DETAIL_BACKEND).hpp>
#endif // BOOST_ATOMIC_DETAIL_PLATFORM_HPP_INCLUDED_