Mirror of https://github.com/boostorg/coroutine.git, synced 2026-02-20 02:32:16 +00:00
coroutine: update of interface
[SVN r85105]
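The diff below reworks the detail classes behind the library's asymmetric coroutine interface: a pull side that resumes the callee and retrieves a value (pull()/has_result()/get()) and a push side that transfers a value into the callee (push()). A minimal usage sketch follows; it assumes the public spelling the interface later shipped under (boost::coroutines::coroutine<T>::pull_type / push_type), so the exact names at this revision may differ.

// Sketch only: public names assumed from later Boost releases.
#include <boost/coroutine/all.hpp>
#include <iostream>

typedef boost::coroutines::coroutine< int > coro_t;

// The coroutine-fn receives the opposite side of the channel; each push
// suspends the callee and resumes the caller (cf. push()/pull() in the diff).
void fibonacci( coro_t::push_type & sink)
{
    int first = 0, second = 1;
    for ( int i = 0; i < 8; ++i)
    {
        sink( first);              // transfer a value to the caller
        int next = first + second;
        first = second;
        second = next;
    }
}

int main()
{
    coro_t::pull_type source( fibonacci); // starts the coroutine, runs to the first push
    while ( source)                       // false once the coroutine-fn has returned
    {
        std::cout << source.get() << ' '; // value delivered by the last push
        source();                         // resume the coroutine to pull the next value
    }
    std::cout << std::endl;
    return 0;
}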
@@ -136,6 +136,108 @@ public:
    }
};

template< typename R >
class pull_coroutine_base< R & > : private noncopyable
{
public:
    typedef intrusive_ptr< pull_coroutine_base >    ptr_t;

private:
    template<
        typename X, typename Y, typename Z, typename V, typename W
    >
    friend class push_coroutine_object;

    unsigned int        use_count_;

protected:
    int                 flags_;
    exception_ptr       except_;
    coroutine_context   caller_;
    coroutine_context   callee_;
    optional< R * >     result_;

    virtual void deallocate_object() = 0;

public:
    pull_coroutine_base( coroutine_context::ctx_fn fn,
                         stack_context * stack_ctx,
                         bool unwind, bool preserve_fpu) :
        use_count_( 0),
        flags_( 0),
        except_(),
        caller_(),
        callee_( fn, stack_ctx),
        result_()
    {
        if ( unwind) flags_ |= flag_force_unwind;
        if ( preserve_fpu) flags_ |= flag_preserve_fpu;
    }

    pull_coroutine_base( coroutine_context const& callee,
                         bool unwind, bool preserve_fpu,
                         optional< R * > const& result) :
        use_count_( 0),
        flags_( 0),
        except_(),
        caller_(),
        callee_( callee),
        result_( result)
    {
        if ( unwind) flags_ |= flag_force_unwind;
        if ( preserve_fpu) flags_ |= flag_preserve_fpu;
    }

    virtual ~pull_coroutine_base()
    {}

    bool force_unwind() const BOOST_NOEXCEPT
    { return 0 != ( flags_ & flag_force_unwind); }

    bool unwind_requested() const BOOST_NOEXCEPT
    { return 0 != ( flags_ & flag_unwind_stack); }

    bool preserve_fpu() const BOOST_NOEXCEPT
    { return 0 != ( flags_ & flag_preserve_fpu); }

    bool is_complete() const BOOST_NOEXCEPT
    { return 0 != ( flags_ & flag_complete); }

    friend inline void intrusive_ptr_add_ref( pull_coroutine_base * p) BOOST_NOEXCEPT
    { ++p->use_count_; }

    friend inline void intrusive_ptr_release( pull_coroutine_base * p) BOOST_NOEXCEPT
    { if ( --p->use_count_ == 0) p->deallocate_object(); }

    void pull()
    {
        BOOST_ASSERT( ! is_complete() );

        holder< R & > hldr_to( & caller_);
        holder< R & > * hldr_from(
            reinterpret_cast< holder< R & > * >(
                hldr_to.ctx->jump(
                    callee_,
                    reinterpret_cast< intptr_t >( & hldr_to),
                    preserve_fpu() ) ) );
        BOOST_ASSERT( hldr_from->ctx);
        callee_ = * hldr_from->ctx;
        result_ = hldr_from->data;
        if ( hldr_from->force_unwind) throw forced_unwind();
        if ( except_) rethrow_exception( except_);
    }

    bool has_result() const
    { return result_; }

    R & get() const
    {
        BOOST_ASSERT( has_result() );

        return * result_.get();
    }
};

template<>
class pull_coroutine_base< void > : private noncopyable
{

@@ -48,6 +48,33 @@ private:
    }
};

template< typename R, typename Allocator >
class pull_coroutine_caller< R &, Allocator > : public pull_coroutine_base< R & >
{
public:
    typedef typename Allocator::template rebind<
        pull_coroutine_caller< R &, Allocator >
    >::other    allocator_t;

    pull_coroutine_caller( coroutine_context const& callee, bool unwind, bool preserve_fpu,
                           allocator_t const& alloc, optional< R * > const& data) BOOST_NOEXCEPT :
        pull_coroutine_base< R & >( callee, unwind, preserve_fpu, data),
        alloc_( alloc)
    {}

    void deallocate_object()
    { destroy_( alloc_, this); }

private:
    allocator_t     alloc_;

    static void destroy_( allocator_t & alloc, pull_coroutine_caller * p)
    {
        alloc.destroy( p);
        alloc.deallocate( p, 1);
    }
};

template< typename Allocator >
class pull_coroutine_caller< void, Allocator > : public pull_coroutine_base< void >
{

@@ -399,6 +399,362 @@ public:
    { destroy_( alloc_, this); }
};

template<
    typename R, typename Fn,
    typename StackAllocator, typename Allocator,
    typename Caller
>
class pull_coroutine_object< R &, Fn, StackAllocator, Allocator, Caller > :
    private stack_tuple< StackAllocator >,
    public pull_coroutine_base< R & >
{
public:
    typedef typename Allocator::template rebind<
        pull_coroutine_object<
            R &, Fn, StackAllocator, Allocator, Caller
        >
    >::other    allocator_t;

private:
    typedef stack_tuple< StackAllocator >   pbase_type;
    typedef pull_coroutine_base< R & >      base_type;

    Fn              fn_;
    allocator_t     alloc_;

    static void destroy_( allocator_t & alloc, pull_coroutine_object * p)
    {
        alloc.destroy( p);
        alloc.deallocate( p, 1);
    }

    pull_coroutine_object( pull_coroutine_object &);
    pull_coroutine_object & operator=( pull_coroutine_object const&);

    void enter_()
    {
        holder< R * > * hldr_from(
            reinterpret_cast< holder< R * > * >(
                this->caller_.jump(
                    this->callee_,
                    reinterpret_cast< intptr_t >( this),
                    this->preserve_fpu() ) ) );
        this->callee_ = * hldr_from->ctx;
        this->result_ = hldr_from->data;
        if ( this->except_) rethrow_exception( this->except_);
    }

    void unwind_stack_() BOOST_NOEXCEPT
    {
        BOOST_ASSERT( ! this->is_complete() );

        this->flags_ |= flag_unwind_stack;
        holder< R * > hldr_to( & this->caller_, true);
        this->caller_.jump(
            this->callee_,
            reinterpret_cast< intptr_t >( & hldr_to),
            this->preserve_fpu() );
        this->flags_ &= ~flag_unwind_stack;

        BOOST_ASSERT( this->is_complete() );
    }

public:
#ifndef BOOST_NO_CXX11_RVALUE_REFERENCES
    pull_coroutine_object( Fn && fn, attributes const& attr,
                           StackAllocator const& stack_alloc,
                           allocator_t const& alloc) :
        pbase_type( stack_alloc, attr.size),
        base_type(
            trampoline1< pull_coroutine_object >,
            & this->stack_ctx,
            stack_unwind == attr.do_unwind,
            fpu_preserved == attr.preserve_fpu),
        fn_( forward< Fn >( fn) ),
        alloc_( alloc)
    { enter_(); }
#else
    pull_coroutine_object( Fn fn, attributes const& attr,
                           StackAllocator const& stack_alloc,
                           allocator_t const& alloc) :
        pbase_type( stack_alloc, attr.size),
        base_type(
            trampoline1< pull_coroutine_object >,
            & this->stack_ctx,
            stack_unwind == attr.do_unwind,
            fpu_preserved == attr.preserve_fpu),
        fn_( fn),
        alloc_( alloc)
    { enter_(); }

    pull_coroutine_object( BOOST_RV_REF( Fn) fn, attributes const& attr,
                           StackAllocator const& stack_alloc,
                           allocator_t const& alloc) :
        pbase_type( stack_alloc, attr.size),
        base_type(
            trampoline1< pull_coroutine_object >,
            & this->stack_ctx,
            stack_unwind == attr.do_unwind,
            fpu_preserved == attr.preserve_fpu),
        fn_( fn),
        alloc_( alloc)
    { enter_(); }
#endif

    ~pull_coroutine_object()
    {
        if ( ! this->is_complete() && this->force_unwind() )
            unwind_stack_();
    }

    void run()
    {
        coroutine_context callee;
        coroutine_context caller;

        {
            // create push_coroutine
            Caller c( this->caller_, false, this->preserve_fpu(), alloc_);
            try
            { fn_( c); }
            catch ( forced_unwind const&)
            {}
            catch (...)
            { this->except_ = current_exception(); }
            callee = c.impl_->callee_;
        }

        this->flags_ |= flag_complete;
        holder< R * > hldr_to( & caller);
        caller.jump(
            callee,
            reinterpret_cast< intptr_t >( & hldr_to),
            this->preserve_fpu() );
        BOOST_ASSERT_MSG( false, "pull_coroutine is complete");
    }

    void deallocate_object()
    { destroy_( alloc_, this); }
};

template<
    typename R, typename Fn,
    typename StackAllocator, typename Allocator,
    typename Caller
>
class pull_coroutine_object< R &, reference_wrapper< Fn >, StackAllocator, Allocator, Caller > :
    private stack_tuple< StackAllocator >,
    public pull_coroutine_base< R & >
{
public:
    typedef typename Allocator::template rebind<
        pull_coroutine_object<
            R &, Fn, StackAllocator, Allocator, Caller
        >
    >::other    allocator_t;

private:
    typedef stack_tuple< StackAllocator >   pbase_type;
    typedef pull_coroutine_base< R & >      base_type;

    Fn              fn_;
    allocator_t     alloc_;

    static void destroy_( allocator_t & alloc, pull_coroutine_object * p)
    {
        alloc.destroy( p);
        alloc.deallocate( p, 1);
    }

    pull_coroutine_object( pull_coroutine_object &);
    pull_coroutine_object & operator=( pull_coroutine_object const&);

    void enter_()
    {
        holder< R * > * hldr_from(
            reinterpret_cast< holder< R * > * >(
                this->caller_.jump(
                    this->callee_,
                    reinterpret_cast< intptr_t >( this),
                    this->preserve_fpu() ) ) );
        this->callee_ = * hldr_from->ctx;
        if ( this->except_) rethrow_exception( this->except_);
    }

    void unwind_stack_() BOOST_NOEXCEPT
    {
        BOOST_ASSERT( ! this->is_complete() );

        this->flags_ |= flag_unwind_stack;
        holder< R * > hldr_to( & this->caller_, true);
        this->caller_.jump(
            this->callee_,
            reinterpret_cast< intptr_t >( & hldr_to),
            this->preserve_fpu() );
        this->flags_ &= ~flag_unwind_stack;

        BOOST_ASSERT( this->is_complete() );
    }

public:
    pull_coroutine_object( reference_wrapper< Fn > fn, attributes const& attr,
                           StackAllocator const& stack_alloc,
                           allocator_t const& alloc) :
        pbase_type( stack_alloc, attr.size),
        base_type(
            trampoline1< pull_coroutine_object >,
            & this->stack_ctx,
            stack_unwind == attr.do_unwind,
            fpu_preserved == attr.preserve_fpu),
        fn_( fn),
        alloc_( alloc)
    { enter_(); }

    ~pull_coroutine_object()
    {
        if ( ! this->is_complete() && this->force_unwind() )
            unwind_stack_();
    }

    void run()
    {
        coroutine_context callee;
        coroutine_context caller;

        {
            // create pull_coroutine
            Caller c( this->caller_, false, this->preserve_fpu(), alloc_);
            try
            { fn_( c); }
            catch ( forced_unwind const&)
            {}
            catch (...)
            { this->except_ = current_exception(); }
            callee = c.impl_->callee_;
        }

        this->flags_ |= flag_complete;
        holder< R * > hldr_to( & caller);
        caller.jump(
            callee,
            reinterpret_cast< intptr_t >( & hldr_to),
            this->preserve_fpu() );
        BOOST_ASSERT_MSG( false, "pull_coroutine is complete");
    }

    void deallocate_object()
    { destroy_( alloc_, this); }
};

template<
    typename R, typename Fn,
    typename StackAllocator, typename Allocator,
    typename Caller
>
class pull_coroutine_object< R &, const reference_wrapper< Fn >, StackAllocator, Allocator, Caller > :
    private stack_tuple< StackAllocator >,
    public pull_coroutine_base< R & >
{
public:
    typedef typename Allocator::template rebind<
        pull_coroutine_object<
            R &, Fn, StackAllocator, Allocator, Caller
        >
    >::other    allocator_t;

private:
    typedef stack_tuple< StackAllocator >   pbase_type;
    typedef pull_coroutine_base< R & >      base_type;

    Fn              fn_;
    allocator_t     alloc_;

    static void destroy_( allocator_t & alloc, pull_coroutine_object * p)
    {
        alloc.destroy( p);
        alloc.deallocate( p, 1);
    }

    pull_coroutine_object( pull_coroutine_object &);
    pull_coroutine_object & operator=( pull_coroutine_object const&);

    void enter_()
    {
        holder< R * > * hldr_from(
            reinterpret_cast< holder< R * > * >(
                this->caller_.jump(
                    this->callee_,
                    reinterpret_cast< intptr_t >( this),
                    this->preserve_fpu() ) ) );
        this->callee_ = * hldr_from->ctx;
        if ( this->except_) rethrow_exception( this->except_);
    }

    void unwind_stack_() BOOST_NOEXCEPT
    {
        BOOST_ASSERT( ! this->is_complete() );

        this->flags_ |= flag_unwind_stack;
        holder< R * > hldr_to( & this->caller_, true);
        this->caller_.jump(
            this->callee_,
            reinterpret_cast< intptr_t >( & hldr_to),
            this->preserve_fpu() );
        this->flags_ &= ~flag_unwind_stack;

        BOOST_ASSERT( this->is_complete() );
    }

public:
    pull_coroutine_object( const reference_wrapper< Fn > fn, attributes const& attr,
                           StackAllocator const& stack_alloc,
                           allocator_t const& alloc) :
        pbase_type( stack_alloc, attr.size),
        base_type(
            trampoline1< pull_coroutine_object >,
            & this->stack_ctx,
            stack_unwind == attr.do_unwind,
            fpu_preserved == attr.preserve_fpu),
        fn_( fn),
        alloc_( alloc)
    { enter_(); }

    ~pull_coroutine_object()
    {
        if ( ! this->is_complete() && this->force_unwind() )
            unwind_stack_();
    }

    void run()
    {
        coroutine_context callee;
        coroutine_context caller;

        {
            // create pull_coroutine
            Caller c( this->caller_, false, this->preserve_fpu(), alloc_);
            try
            { fn_( c); }
            catch ( forced_unwind const&)
            {}
            catch (...)
            { this->except_ = current_exception(); }
            callee = c.impl_->callee_;
        }

        this->flags_ |= flag_complete;
        holder< R * > hldr_to( & caller);
        caller.jump(
            callee,
            reinterpret_cast< intptr_t >( & hldr_to),
            this->preserve_fpu() );
        BOOST_ASSERT_MSG( false, "pull_coroutine is complete");
    }

    void deallocate_object()
    { destroy_( alloc_, this); }
};

template<
    typename Fn,
    typename StackAllocator, typename Allocator,

@@ -122,7 +122,7 @@ public:
    {
        BOOST_ASSERT( ! is_complete() );

        holder< Arg > hldr_to( & caller_, arg);
        holder< Arg > hldr_to( & caller_, boost::forward( arg) );
        holder< Arg > * hldr_from(
            reinterpret_cast< holder< Arg > * >(
                hldr_to.ctx->jump(
@@ -135,7 +135,7 @@ public:
        if ( except_) rethrow_exception( except_);
    }
#else
    void push( Arg arg)
    void push( Arg const& arg)
    {
        BOOST_ASSERT( ! is_complete() );

@@ -244,96 +244,9 @@ public:
    {
        BOOST_ASSERT( ! is_complete() );

        holder< Arg & > hldr_to( & caller_, arg);
        holder< Arg & > * hldr_from(
            reinterpret_cast< holder< Arg & > * >(
                hldr_to.ctx->jump(
                    callee_,
                    reinterpret_cast< intptr_t >( & hldr_to),
                    preserve_fpu() ) ) );
        BOOST_ASSERT( hldr_from->ctx);
        callee_ = * hldr_from->ctx;
        if ( hldr_from->force_unwind) throw forced_unwind();
        if ( except_) rethrow_exception( except_);
    }
};

template< typename Arg >
class push_coroutine_base< Arg const& > : private noncopyable
{
public:
    typedef intrusive_ptr< push_coroutine_base >    ptr_t;

private:
    template<
        typename X, typename Y, typename Z, typename V, typename W
    >
    friend class pull_coroutine_object;

    unsigned int        use_count_;

protected:
    int                 flags_;
    exception_ptr       except_;
    coroutine_context   caller_;
    coroutine_context   callee_;

    virtual void deallocate_object() = 0;

public:
    push_coroutine_base( coroutine_context::ctx_fn fn,
                         stack_context * stack_ctx,
                         bool unwind, bool preserve_fpu) :
        use_count_( 0),
        flags_( 0),
        except_(),
        caller_(),
        callee_( fn, stack_ctx)
    {
        if ( unwind) flags_ |= flag_force_unwind;
        if ( preserve_fpu) flags_ |= flag_preserve_fpu;
    }

    push_coroutine_base( coroutine_context const& callee,
                         bool unwind, bool preserve_fpu) :
        use_count_( 0),
        flags_( 0),
        except_(),
        caller_(),
        callee_( callee)
    {
        if ( unwind) flags_ |= flag_force_unwind;
        if ( preserve_fpu) flags_ |= flag_preserve_fpu;
    }

    virtual ~push_coroutine_base()
    {}

    bool force_unwind() const BOOST_NOEXCEPT
    { return 0 != ( flags_ & flag_force_unwind); }

    bool unwind_requested() const BOOST_NOEXCEPT
    { return 0 != ( flags_ & flag_unwind_stack); }

    bool preserve_fpu() const BOOST_NOEXCEPT
    { return 0 != ( flags_ & flag_preserve_fpu); }

    bool is_complete() const BOOST_NOEXCEPT
    { return 0 != ( flags_ & flag_complete); }

    friend inline void intrusive_ptr_add_ref( push_coroutine_base * p) BOOST_NOEXCEPT
    { ++p->use_count_; }

    friend inline void intrusive_ptr_release( push_coroutine_base * p) BOOST_NOEXCEPT
    { if ( --p->use_count_ == 0) p->deallocate_object(); }

    void push( Arg const& arg)
    {
        BOOST_ASSERT( ! is_complete() );

        holder< Arg const& > hldr_to( & caller_, arg);
        holder< Arg const& > * hldr_from(
            reinterpret_cast< holder< Arg const& > * >(
        holder< Arg * > hldr_to( & caller_, & arg);
        holder< Arg * > * hldr_from(
            reinterpret_cast< holder< Arg * > * >(
                hldr_to.ctx->jump(
                    callee_,
                    reinterpret_cast< intptr_t >( & hldr_to),

@@ -428,6 +428,391 @@ public:
    { destroy_( alloc_, this); }
};

template<
    typename Arg, typename Fn,
    typename StackAllocator, typename Allocator,
    typename Caller
>
class push_coroutine_object< Arg &, Fn, StackAllocator, Allocator, Caller > :
    private stack_tuple< StackAllocator >,
    public push_coroutine_base< Arg & >
{
public:
    typedef typename Allocator::template rebind<
        push_coroutine_object<
            Arg &, Fn, StackAllocator, Allocator, Caller
        >
    >::other    allocator_t;

private:
    typedef stack_tuple< StackAllocator >   pbase_type;
    typedef push_coroutine_base< Arg & >    base_type;

    Fn              fn_;
    allocator_t     alloc_;

    static void destroy_( allocator_t & alloc, push_coroutine_object * p)
    {
        alloc.destroy( p);
        alloc.deallocate( p, 1);
    }

    push_coroutine_object( push_coroutine_object &);
    push_coroutine_object & operator=( push_coroutine_object const&);

    void enter_()
    {
        holder< void > * hldr_from(
            reinterpret_cast< holder< void > * >(
                this->caller_.jump(
                    this->callee_,
                    reinterpret_cast< intptr_t >( this),
                    this->preserve_fpu() ) ) );
        this->callee_ = * hldr_from->ctx;
        if ( this->except_) rethrow_exception( this->except_);
    }

    void unwind_stack_() BOOST_NOEXCEPT
    {
        BOOST_ASSERT( ! this->is_complete() );

        this->flags_ |= flag_unwind_stack;
        holder< Arg * > hldr_to( & this->caller_, true);
        this->caller_.jump(
            this->callee_,
            reinterpret_cast< intptr_t >( & hldr_to),
            this->preserve_fpu() );
        this->flags_ &= ~flag_unwind_stack;

        BOOST_ASSERT( this->is_complete() );
    }

public:
#ifndef BOOST_NO_CXX11_RVALUE_REFERENCES
    push_coroutine_object( Fn && fn, attributes const& attr,
                           StackAllocator const& stack_alloc,
                           allocator_t const& alloc) :
        pbase_type( stack_alloc, attr.size),
        base_type(
            trampoline1< push_coroutine_object >,
            & this->stack_ctx,
            stack_unwind == attr.do_unwind,
            fpu_preserved == attr.preserve_fpu),
        fn_( forward< Fn >( fn) ),
        alloc_( alloc)
    { enter_(); }
#else
    push_coroutine_object( Fn fn, attributes const& attr,
                           StackAllocator const& stack_alloc,
                           allocator_t const& alloc) :
        pbase_type( stack_alloc, attr.size),
        base_type(
            trampoline1< push_coroutine_object >,
            & this->stack_ctx,
            stack_unwind == attr.do_unwind,
            fpu_preserved == attr.preserve_fpu),
        fn_( fn),
        alloc_( alloc)
    { enter_(); }

    push_coroutine_object( BOOST_RV_REF( Fn) fn, attributes const& attr,
                           StackAllocator const& stack_alloc,
                           allocator_t const& alloc) :
        pbase_type( stack_alloc, attr.size),
        base_type(
            trampoline1< push_coroutine_object >,
            & this->stack_ctx,
            stack_unwind == attr.do_unwind,
            fpu_preserved == attr.preserve_fpu),
        fn_( fn),
        alloc_( alloc)
    { enter_(); }
#endif

    ~push_coroutine_object()
    {
        if ( ! this->is_complete() && this->force_unwind() )
            unwind_stack_();
    }

    void run()
    {
        coroutine_context callee;
        coroutine_context caller;

        {
            holder< void > hldr_to( & caller);
            holder< Arg * > * hldr_from(
                reinterpret_cast< holder< Arg * > * >(
                    caller.jump(
                        this->caller_,
                        reinterpret_cast< intptr_t >( & hldr_to),
                        this->preserve_fpu() ) ) );
            BOOST_ASSERT( hldr_from->ctx);
            BOOST_ASSERT( hldr_from->data);

            // create pull_coroutine
            Caller c( * hldr_from->ctx, false, this->preserve_fpu(), alloc_, hldr_from->data);
            try
            { fn_( c); }
            catch ( forced_unwind const&)
            {}
            catch (...)
            { this->except_ = current_exception(); }
            callee = c.impl_->callee_;
        }

        this->flags_ |= flag_complete;
        holder< Arg * > hldr_to( & caller);
        caller.jump(
            callee,
            reinterpret_cast< intptr_t >( & hldr_to),
            this->preserve_fpu() );
        BOOST_ASSERT_MSG( false, "push_coroutine is complete");
    }

    void deallocate_object()
    { destroy_( alloc_, this); }
};

template<
    typename Arg, typename Fn,
    typename StackAllocator, typename Allocator,
    typename Caller
>
class push_coroutine_object< Arg &, reference_wrapper< Fn >, StackAllocator, Allocator, Caller > :
    private stack_tuple< StackAllocator >,
    public push_coroutine_base< Arg & >
{
public:
    typedef typename Allocator::template rebind<
        push_coroutine_object<
            Arg &, Fn, StackAllocator, Allocator, Caller
        >
    >::other    allocator_t;

private:
    typedef stack_tuple< StackAllocator >   pbase_type;
    typedef push_coroutine_base< Arg & >    base_type;

    Fn              fn_;
    allocator_t     alloc_;

    static void destroy_( allocator_t & alloc, push_coroutine_object * p)
    {
        alloc.destroy( p);
        alloc.deallocate( p, 1);
    }

    push_coroutine_object( push_coroutine_object &);
    push_coroutine_object & operator=( push_coroutine_object const&);

    void enter_()
    {
        holder< void > * hldr_from(
            reinterpret_cast< holder< void > * >(
                this->caller_.jump(
                    this->callee_,
                    reinterpret_cast< intptr_t >( this),
                    this->preserve_fpu() ) ) );
        this->callee_ = * hldr_from->ctx;
        if ( this->except_) rethrow_exception( this->except_);
    }

    void unwind_stack_() BOOST_NOEXCEPT
    {
        BOOST_ASSERT( ! this->is_complete() );

        this->flags_ |= flag_unwind_stack;
        holder< Arg * > hldr_to( & this->caller_, true);
        this->caller_.jump(
            this->callee_,
            reinterpret_cast< intptr_t >( & hldr_to),
            this->preserve_fpu() );
        this->flags_ &= ~flag_unwind_stack;

        BOOST_ASSERT( this->is_complete() );
    }

public:
    push_coroutine_object( reference_wrapper< Fn > fn, attributes const& attr,
                           StackAllocator const& stack_alloc,
                           allocator_t const& alloc) :
        pbase_type( stack_alloc, attr.size),
        base_type(
            trampoline1< push_coroutine_object >,
            & this->stack_ctx,
            stack_unwind == attr.do_unwind,
            fpu_preserved == attr.preserve_fpu),
        fn_( fn),
        alloc_( alloc)
    { enter_(); }

    ~push_coroutine_object()
    {
        if ( ! this->is_complete() && this->force_unwind() )
            unwind_stack_();
    }

    void run()
    {
        coroutine_context callee;
        coroutine_context caller;

        {
            holder< void > hldr_to( & caller);
            holder< Arg * > * hldr_from(
                reinterpret_cast< holder< Arg * > * >(
                    caller.jump(
                        this->caller_,
                        reinterpret_cast< intptr_t >( & hldr_to),
                        this->preserve_fpu() ) ) );
            BOOST_ASSERT( hldr_from->ctx);
            BOOST_ASSERT( hldr_from->data);

            // create pull_coroutine
            Caller c( * hldr_from->ctx, false, this->preserve_fpu(), alloc_, hldr_from->data);
            try
            { fn_( c); }
            catch ( forced_unwind const&)
            {}
            catch (...)
            { this->except_ = current_exception(); }
            callee = c.impl_->callee_;
        }

        this->flags_ |= flag_complete;
        holder< Arg * > hldr_to( & caller);
        caller.jump(
            callee,
            reinterpret_cast< intptr_t >( & hldr_to),
            this->preserve_fpu() );
        BOOST_ASSERT_MSG( false, "push_coroutine is complete");
    }

    void deallocate_object()
    { destroy_( alloc_, this); }
};

template<
    typename Arg, typename Fn,
    typename StackAllocator, typename Allocator,
    typename Caller
>
class push_coroutine_object< Arg &, const reference_wrapper< Fn >, StackAllocator, Allocator, Caller > :
    private stack_tuple< StackAllocator >,
    public push_coroutine_base< Arg & >
{
public:
    typedef typename Allocator::template rebind<
        push_coroutine_object<
            Arg, Fn, StackAllocator, Allocator, Caller
        >
    >::other    allocator_t;

private:
    typedef stack_tuple< StackAllocator >   pbase_type;
    typedef push_coroutine_base< Arg & >    base_type;

    Fn              fn_;
    allocator_t     alloc_;

    static void destroy_( allocator_t & alloc, push_coroutine_object * p)
    {
        alloc.destroy( p);
        alloc.deallocate( p, 1);
    }

    push_coroutine_object( push_coroutine_object &);
    push_coroutine_object & operator=( push_coroutine_object const&);

    void enter_()
    {
        holder< void > * hldr_from(
            reinterpret_cast< holder< void > * >(
                this->caller_.jump(
                    this->callee_,
                    reinterpret_cast< intptr_t >( this),
                    this->preserve_fpu() ) ) );
        this->callee_ = * hldr_from->ctx;
        if ( this->except_) rethrow_exception( this->except_);
    }

    void unwind_stack_() BOOST_NOEXCEPT
    {
        BOOST_ASSERT( ! this->is_complete() );

        this->flags_ |= flag_unwind_stack;
        holder< Arg * > hldr_to( & this->caller_, true);
        this->caller_.jump(
            this->callee_,
            reinterpret_cast< intptr_t >( & hldr_to),
            this->preserve_fpu() );
        this->flags_ &= ~flag_unwind_stack;

        BOOST_ASSERT( this->is_complete() );
    }

public:
    push_coroutine_object( const reference_wrapper< Fn > fn, attributes const& attr,
                           StackAllocator const& stack_alloc,
                           allocator_t const& alloc) :
        pbase_type( stack_alloc, attr.size),
        base_type(
            trampoline1< push_coroutine_object >,
            & this->stack_ctx,
            stack_unwind == attr.do_unwind,
            fpu_preserved == attr.preserve_fpu),
        fn_( fn),
        alloc_( alloc)
    { enter_(); }

    ~push_coroutine_object()
    {
        if ( ! this->is_complete() && this->force_unwind() )
            unwind_stack_();
    }

    void run()
    {
        coroutine_context callee;
        coroutine_context caller;

        {
            holder< void > hldr_to( & caller);
            holder< Arg * > * hldr_from(
                reinterpret_cast< holder< Arg * > * >(
                    caller.jump(
                        this->caller_,
                        reinterpret_cast< intptr_t >( & hldr_to),
                        this->preserve_fpu() ) ) );
            BOOST_ASSERT( hldr_from->ctx);
            BOOST_ASSERT( hldr_from->data);

            // create pull_coroutine
            Caller c( * hldr_from->ctx, false, this->preserve_fpu(), alloc_, hldr_from->data);
            try
            { fn_( c); }
            catch ( forced_unwind const&)
            {}
            catch (...)
            { this->except_ = current_exception(); }
            callee = c.impl_->callee_;
        }

        this->flags_ |= flag_complete;
        holder< Arg * > hldr_to( & caller);
        caller.jump(
            callee,
            reinterpret_cast< intptr_t >( & hldr_to),
            this->preserve_fpu() );
        BOOST_ASSERT_MSG( false, "push_coroutine is complete");
    }

    void deallocate_object()
    { destroy_( alloc_, this); }
};

template<
    typename Fn,
    typename StackAllocator, typename Allocator,
