The C and C++ Include Header Files
/usr/include/c++/11/experimental/executor
$ cat /usr/include/c++/11/experimental/executor

// <experimental/executor> -*- C++ -*-

// Copyright (C) 2015-2021 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.

/** @file experimental/executor
 *  This is a TS C++ Library header.
 *  @ingroup networking-ts
 */

#ifndef _GLIBCXX_EXPERIMENTAL_EXECUTOR
#define _GLIBCXX_EXPERIMENTAL_EXECUTOR 1

#pragma GCC system_header

#if __cplusplus >= 201402L

#include <algorithm>
#include <condition_variable>
#include <functional>
#include <future>
#include <list>
#include <queue>
#include <thread>
#include <tuple>
#include <unordered_map>
#include <vector>
#include <experimental/netfwd>
#include <bits/unique_ptr.h>
#include <experimental/bits/net.h>

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION
namespace experimental
{
namespace net
{
inline namespace v1
{

  /** @addtogroup networking-ts
   *  @{
   */

  /// Customization point for asynchronous operations.
  template<typename _CompletionToken, typename _Signature>
    class async_result;

  /// Convenience utility to help implement asynchronous operations.
  template<typename _CompletionToken, typename _Signature>
    class async_completion;

  template<typename _Tp, typename _ProtoAlloc, typename = __void_t<>>
    struct __associated_allocator_impl
    {
      using type = _ProtoAlloc;

      static type
      _S_get(const _Tp&, const _ProtoAlloc& __a) noexcept { return __a; }
    };

  template<typename _Tp, typename _ProtoAlloc>
    struct __associated_allocator_impl<_Tp, _ProtoAlloc,
                                       __void_t<typename _Tp::allocator_type>>
    {
      using type = typename _Tp::allocator_type;

      static type
      _S_get(const _Tp& __t, const _ProtoAlloc&) noexcept
      { return __t.get_allocator(); }
    };

  /// Helper to associate an allocator with a type.
  template<typename _Tp, typename _ProtoAllocator = allocator<void>>
    struct associated_allocator
    : __associated_allocator_impl<_Tp, _ProtoAllocator>
    {
      static auto
      get(const _Tp& __t,
          const _ProtoAllocator& __a = _ProtoAllocator()) noexcept
      {
        using _Impl = __associated_allocator_impl<_Tp, _ProtoAllocator>;
        return _Impl::_S_get(__t, __a);
      }
    };

  /// Alias template for associated_allocator.
  template<typename _Tp, typename _ProtoAllocator = allocator<void>>
    using associated_allocator_t
      = typename associated_allocator<_Tp, _ProtoAllocator>::type;

  // get_associated_allocator:

  template<typename _Tp>
    inline associated_allocator_t<_Tp>
    get_associated_allocator(const _Tp& __t) noexcept
    { return associated_allocator<_Tp>::get(__t); }

  template<typename _Tp, typename _ProtoAllocator>
    inline associated_allocator_t<_Tp, _ProtoAllocator>
    get_associated_allocator(const _Tp& __t,
                             const _ProtoAllocator& __a) noexcept
    { return associated_allocator<_Tp, _ProtoAllocator>::get(__t, __a); }

  enum class fork_event { prepare, parent, child };

  /// An extensible, type-safe, polymorphic set of services.
  class execution_context;

  class service_already_exists : public logic_error
  {
  public:
    // _GLIBCXX_RESOLVE_LIB_DEFECTS
    // 3414. service_already_exists has no usable constructors
    service_already_exists() : logic_error("service already exists") { }
  };

  template<typename _Tp>
struct is_executor; 139 140 struct executor_arg_t { }; 141 142 constexpr executor_arg_t executor_arg = executor_arg_t(); 143 144 /// Trait for determining whether to construct an object with an executor. 145 template
struct uses_executor; 146 147 template
> 148 struct __associated_executor_impl 149 { 150 using type = _Executor; 151 152 static type 153 _S_get(const _Tp&, const _Executor& __e) noexcept { return __e; } 154 }; 155 156 template
157 struct __associated_executor_impl<_Tp, _Executor, 158 __void_t
> 159 { 160 using type = typename _Tp::executor_type; 161 162 static type 163 _S_get(const _Tp& __t, const _Executor&) noexcept 164 { return __t.get_executor(); } 165 }; 166 167 /// Helper to associate an executor with a type. 168 template
169 struct associated_executor 170 : __associated_executor_impl<_Tp, _Executor> 171 { 172 static auto 173 get(const _Tp& __t, const _Executor& __e = _Executor()) noexcept 174 { return __associated_executor_impl<_Tp, _Executor>::_S_get(__t, __e); } 175 }; 176 177 178 template
179 using associated_executor_t 180 = typename associated_executor<_Tp, _Executor>::type; 181 182 template
183 using __is_exec_context 184 = is_convertible<_ExecutionContext&, execution_context&>; 185 186 template
187 using __executor_t = typename _Tp::executor_type; 188 189 // get_associated_executor: 190 191 template
192 inline associated_executor_t<_Tp> 193 get_associated_executor(const _Tp& __t) noexcept 194 { return associated_executor<_Tp>::get(__t); } 195 196 template
197 inline 198 enable_if_t
::value, 199 associated_executor_t<_Tp, _Executor>> 200 get_associated_executor(const _Tp& __t, const _Executor& __ex) 201 { return associated_executor<_Tp, _Executor>::get(__t, __ex); } 202 203 template
204 inline 205 enable_if_t<__is_exec_context<_ExecutionContext>::value, 206 associated_executor_t<_Tp, __executor_t<_ExecutionContext>>> 207 get_associated_executor(const _Tp& __t, _ExecutionContext& __ctx) noexcept 208 { return net::get_associated_executor(__t, __ctx.get_executor()); } 209 210 211 /// Helper to bind an executor to an object or function. 212 template
213 class executor_binder; 214 215 template
216 class async_result
, _Signature>; 217 218 template
219 struct associated_allocator
, 220 _ProtoAllocator>; 221 222 template
223 struct associated_executor
, _Executor1>; 224 225 // bind_executor: 226 227 template
228 inline 229 enable_if_t
::value, 230 executor_binder
, _Executor>> 231 bind_executor(const _Executor& __ex, _Tp&& __t) 232 { return { std::forward<_Tp>(__t), __ex }; } 233 234 template
235 inline 236 enable_if_t<__is_exec_context<_ExecutionContext>::value, 237 executor_binder
, __executor_t<_ExecutionContext>>> 238 bind_executor(_ExecutionContext& __ctx, _Tp&& __t) 239 { return { __ctx.get_executor(), forward<_Tp>(__t) }; } 240 241 242 /// A scope-guard type to record when work is started and finished. 243 template
244 class executor_work_guard; 245 246 // make_work_guard: 247 248 template
249 inline 250 enable_if_t
::value, executor_work_guard<_Executor>> 251 make_work_guard(const _Executor& __ex) 252 { return executor_work_guard<_Executor>(__ex); } 253 254 template
255 inline 256 enable_if_t<__is_exec_context<_ExecutionContext>::value, 257 executor_work_guard<__executor_t<_ExecutionContext>>> 258 make_work_guard(_ExecutionContext& __ctx) 259 { return net::make_work_guard(__ctx.get_executor()); } 260 261 template
262 inline 263 enable_if_t<__not_<__or_
, __is_exec_context<_Tp>>>::value, 264 executor_work_guard
>> 265 make_work_guard(const _Tp& __t) 266 { return net::get_associated_executor(__t); } 267 268 template
269 auto 270 make_work_guard(const _Tp& __t, _Up&& __u) 271 -> decltype(net::make_work_guard( 272 net::get_associated_executor(__t, forward<_Up>(__u)))) 273 { 274 return net::make_work_guard( 275 net::get_associated_executor(__t, forward<_Up>(__u))); 276 } 277 278 /// Allows function objects to execute on any thread. 279 class system_executor; 280 281 /// The execution context associated with system_executor objects. 282 class system_context; 283 284 inline bool 285 operator==(const system_executor&, const system_executor&) { return true; } 286 287 inline bool 288 operator!=(const system_executor&, const system_executor&) { return false; } 289 290 /// Exception thrown by empty executors. 291 class bad_executor; 292 293 /// Polymorphic wrapper for types satisfying the Executor requirements. 294 class executor; 295 296 bool 297 operator==(const executor&, const executor&) noexcept; 298 299 bool 300 operator==(const executor&, nullptr_t) noexcept; 301 302 bool 303 operator==(nullptr_t, const executor&) noexcept; 304 305 bool 306 operator!=(const executor&, const executor&) noexcept; 307 308 bool 309 operator!=(const executor&, nullptr_t) noexcept; 310 311 bool 312 operator!=(nullptr_t, const executor&) noexcept; 313 314 void swap(executor&, executor&) noexcept; 315 316 // dispatch: 317 318 template
319 __deduced_t<_CompletionToken, void()> 320 dispatch(_CompletionToken&& __token); 321 322 template
323 __deduced_t<_CompletionToken, void()> 324 dispatch(const _Executor& __ex, _CompletionToken&& __token); 325 326 template
327 __deduced_t<_CompletionToken, void()> 328 dispatch(_ExecutionContext& __ctx, _CompletionToken&& __token); 329 330 // post: 331 332 template
333 __deduced_t<_CompletionToken, void()> 334 post(_CompletionToken&& __token); 335 template
336 enable_if_t
::value, 337 __deduced_t<_CompletionToken, void()>> 338 post(const _Executor& __ex, _CompletionToken&& __token); 339 template
340 enable_if_t<__is_exec_context<_ExecutionContext>::value, 341 __deduced_t<_CompletionToken, void()>> 342 post(_ExecutionContext& __ctx, _CompletionToken&& __token); 343 344 // defer: 345 346 template
347 __deduced_t<_CompletionToken, void()> 348 defer(_CompletionToken&& __token); 349 template
350 __deduced_t<_CompletionToken, void()> 351 defer(const _Executor& __ex, _CompletionToken&& __token); 352 template
353 __deduced_t<_CompletionToken, void()> 354 defer(_ExecutionContext& __ctx, _CompletionToken&& __token); 355 356 template
357 class strand; 358 359 template
360 bool 361 operator==(const strand<_Executor>& __a, const strand<_Executor>& __b); 362 363 template
364 bool 365 operator!=(const strand<_Executor>& __a, const strand<_Executor>& __b) 366 { return !(__a == __b); } 367 368 template
369 class async_result 370 { 371 public: 372 using completion_handler_type = _CompletionToken; 373 using return_type = void; 374 375 explicit async_result(completion_handler_type&) {} 376 async_result(const async_result&) = delete; 377 async_result& operator=(const async_result&) = delete; 378 379 return_type get() {} 380 }; 381 382 template
383 class async_completion 384 { 385 using __result_type 386 = async_result
, _Signature>; 387 388 public: 389 using completion_handler_type 390 = typename __result_type::completion_handler_type; 391 392 private: 393 using __handler_type = conditional_t< 394 is_same<_CompletionToken, completion_handler_type>::value, 395 completion_handler_type&, 396 completion_handler_type>; 397 398 public: 399 explicit 400 async_completion(_CompletionToken& __t) 401 : completion_handler(std::forward<__handler_type>(__t)), 402 result(completion_handler) 403 { } 404 405 async_completion(const async_completion&) = delete; 406 async_completion& operator=(const async_completion&) = delete; 407 408 __handler_type completion_handler; 409 __result_type result; 410 }; 411 412 413 class execution_context 414 { 415 public: 416 class service 417 { 418 protected: 419 // construct / copy / destroy: 420 421 explicit 422 service(execution_context& __owner) : _M_context(__owner) { } 423 424 service(const service&) = delete; 425 service& operator=(const service&) = delete; 426 427 virtual ~service() { } // TODO should not be inline 428 429 // service observers: 430 431 execution_context& context() const noexcept { return _M_context; } 432 433 private: 434 // service operations: 435 436 virtual void shutdown() noexcept = 0; 437 virtual void notify_fork(fork_event) { } 438 439 friend class execution_context; 440 execution_context& _M_context; 441 }; 442 443 // construct / copy / destroy: 444 445 execution_context() { } 446 447 execution_context(const execution_context&) = delete; 448 execution_context& operator=(const execution_context&) = delete; 449 450 virtual ~execution_context() 451 { 452 shutdown(); 453 destroy(); 454 } 455 456 // execution context operations: 457 458 void 459 notify_fork(fork_event __e) 460 { 461 auto __l = [=](auto& __svc) { __svc._M_ptr->notify_fork(__e); }; 462 if (__e == fork_event::prepare) 463 std::for_each(_M_services.rbegin(), _M_services.rend(), __l); 464 else 465 std::for_each(_M_services.begin(), _M_services.end(), __l); 466 } 467 468 protected: 469 // execution context protected operations: 470 471 void 472 shutdown() 473 { 474 std::for_each(_M_services.rbegin(), _M_services.rend(), 475 [=](auto& __svc) { 476 if (__svc._M_active) 477 { 478 __svc._M_ptr->shutdown(); 479 __svc._M_active = false; 480 } 481 }); 482 } 483 484 void 485 destroy() 486 { 487 while (_M_services.size()) 488 _M_services.pop_back(); 489 _M_keys.clear(); 490 } 491 492 protected: 493 494 template
495 static void 496 _S_deleter(service* __svc) { delete static_cast<_Service*>(__svc); } 497 498 struct _ServicePtr 499 { 500 template
501 explicit 502 _ServicePtr(_Service* __svc) 503 : _M_ptr(__svc, &_S_deleter<_Service>), _M_active(true) { } 504 505 std::unique_ptr
_M_ptr; 506 bool _M_active; 507 }; 508 509 #if defined(_GLIBCXX_HAS_GTHREADS) 510 using mutex_type = std::mutex; 511 #else 512 struct mutex_type 513 { 514 void lock() const { } 515 void unlock() const { } 516 }; 517 #endif 518 mutable mutex_type _M_mutex; 519 520 // Sorted in order of beginning of service object lifetime. 521 std::list<_ServicePtr> _M_services; 522 523 template
524 service* 525 _M_add_svc(_Args&&... __args) 526 { 527 _M_services.push_back( 528 _ServicePtr{new _Service{*this, std::forward<_Args>(__args)...}} ); 529 return _M_services.back()._M_ptr.get(); 530 } 531 532 using __key_type = void(*)(); 533 534 template
535 static __key_type 536 _S_key() { return reinterpret_cast<__key_type>(&_S_key<_Key>); } 537 538 std::unordered_map<__key_type, service*> _M_keys; 539 540 template
541 friend typename _Service::key_type& 542 use_service(execution_context&); 543 544 template
545 friend _Service& 546 make_service(execution_context&, _Args&&...); 547 548 template
549 friend bool 550 has_service(const execution_context&) noexcept; 551 }; 552 553 // service access: 554 555 template
556 typename _Service::key_type& 557 use_service(execution_context& __ctx) 558 { 559 using _Key = typename _Service::key_type; 560 static_assert(is_base_of
::value, 561 "a service type must derive from execution_context::service"); 562 static_assert(is_base_of<_Key, _Service>::value, 563 "a service type must match or derive from its key_type"); 564 auto __key = execution_context::_S_key<_Key>(); 565 lock_guard
__lock(__ctx._M_mutex); 566 auto& __svc = __ctx._M_keys[__key]; 567 if (__svc == nullptr) 568 { 569 __try { 570 __svc = __ctx._M_add_svc<_Service>(); 571 } __catch(...) { 572 __ctx._M_keys.erase(__key); 573 __throw_exception_again; 574 } 575 } 576 return static_cast<_Key&>(*__svc); 577 } 578 579 template
580 _Service& 581 make_service(execution_context& __ctx, _Args&&... __args) 582 { 583 using _Key = typename _Service::key_type; 584 static_assert(is_base_of
::value, 585 "a service type must derive from execution_context::service"); 586 static_assert(is_base_of<_Key, _Service>::value, 587 "a service type must match or derive from its key_type"); 588 auto __key = execution_context::_S_key<_Key>(); 589 lock_guard
__lock(__ctx._M_mutex); 590 auto& __svc = __ctx._M_keys[__key]; 591 if (__svc != nullptr) 592 throw service_already_exists(); 593 __try { 594 __svc = __ctx._M_add_svc<_Service>(std::forward<_Args>(__args)...); 595 } __catch(...) { 596 __ctx._M_keys.erase(__key); 597 __throw_exception_again; 598 } 599 return static_cast<_Service&>(*__svc); 600 } 601 602 template
603 inline bool 604 has_service(const execution_context& __ctx) noexcept 605 { 606 using _Key = typename _Service::key_type; 607 static_assert(is_base_of
::value, 608 "a service type must derive from execution_context::service"); 609 static_assert(is_base_of<_Key, _Service>::value, 610 "a service type must match or derive from its key_type"); 611 lock_guard
__lock(__ctx._M_mutex); 612 return __ctx._M_keys.count(execution_context::_S_key<_Key>()); 613 } 614 615 template
> 616 struct __is_executor_impl : false_type 617 { }; 618 619 // Check Executor requirements. 620 template
> 621 auto 622 __executor_reqs(_Up* __x = 0, const _Up* __cx = 0, void(*__f)() = 0, 623 const allocator
& __a = {}) 624 -> enable_if_t<__is_value_constructible<_Tp>::value, __void_t< 625 decltype(*__cx == *__cx), 626 decltype(*__cx != *__cx), 627 decltype(__x->context()), 628 decltype(__x->on_work_started()), 629 decltype(__x->on_work_finished()), 630 decltype(__x->dispatch(std::move(__f), __a)), 631 decltype(__x->post(std::move(__f), __a)), 632 decltype(__x->defer(std::move(__f), __a)) 633 >>; 634 635 template
636 struct __is_executor_impl<_Tp, decltype(__executor_reqs<_Tp>())> 637 : true_type 638 { }; 639 640 template
641 struct is_executor : __is_executor_impl<_Tp> 642 { }; 643 644 template
645 constexpr bool is_executor_v = is_executor<_Tp>::value; 646 647 template
> 648 struct __uses_executor_impl : false_type 649 { }; 650 651 template
652 struct __uses_executor_impl<_Tp, _Executor, 653 __void_t
> 654 : is_convertible<_Executor, typename _Tp::executor_type> 655 { }; 656 657 template
658 struct uses_executor : __uses_executor_impl<_Tp, _Executor>::type 659 { }; 660 661 template
662 constexpr bool uses_executor_v = uses_executor<_Tp, _Executor>::value; 663 664 template
665 class executor_binder 666 { 667 struct __use_exec { }; 668 669 public: 670 // types: 671 672 using target_type = _Tp; 673 using executor_type = _Executor; 674 675 // construct / copy / destroy: 676 677 executor_binder(_Tp __t, const _Executor& __ex) 678 : executor_binder(__use_exec{}, std::move(__t), __ex) 679 { } 680 681 executor_binder(const executor_binder&) = default; 682 executor_binder(executor_binder&&) = default; 683 684 template
685 executor_binder(const executor_binder<_Up, _OtherExecutor>& __other) 686 : executor_binder(__use_exec{}, __other.get(), __other.get_executor()) 687 { } 688 689 template
690 executor_binder(executor_binder<_Up, _OtherExecutor>&& __other) 691 : executor_binder(__use_exec{}, std::move(__other.get()), 692 __other.get_executor()) 693 { } 694 695 template
696 executor_binder(executor_arg_t, const _Executor& __ex, 697 const executor_binder<_Up, _OtherExecutor>& __other) 698 : executor_binder(__use_exec{}, __other.get(), __ex) 699 { } 700 701 template
702 executor_binder(executor_arg_t, const _Executor& __ex, 703 executor_binder<_Up, _OtherExecutor>&& __other) 704 : executor_binder(__use_exec{}, std::move(__other.get()), __ex) 705 { } 706 707 ~executor_binder(); 708 709 // executor binder access: 710 711 _Tp& get() noexcept { return _M_target; } 712 const _Tp& get() const noexcept { return _M_target; } 713 executor_type get_executor() const noexcept { return _M_ex; } 714 715 // executor binder invocation: 716 717 template
718 result_of_t<_Tp&(_Args&&...)> 719 operator()(_Args&&... __args) 720 { return std::__invoke(get(), std::forward<_Args>(__args)...); } 721 722 template
723 result_of_t
724 operator()(_Args&&... __args) const 725 { return std::__invoke(get(), std::forward<_Args>(__args)...); } 726 727 private: 728 template
729 using __use_exec_cond 730 = __and_
, 731 is_constructible<_Tp, executor_arg_t, _Executor, _Up>>; 732 733 template
::value>> 735 executor_binder(__use_exec, _Up&& __u, _Exec&& __ex) 736 : _M_ex(std::forward<_Exec>(__ex)), 737 _M_target(executor_arg, _M_ex, std::forward<_Up>(__u)) 738 { } 739 740 template
::value>> 742 executor_binder(__use_exec, _Up&& __u, const _Exec& __ex) 743 : _M_ex(std::forward<_Exec>(__ex)), 744 _M_target(std::forward<_Up>(__u)) 745 { } 746 747 _Executor _M_ex; 748 _Tp _M_target; 749 }; 750 751 template
752 class async_result
, _Signature> 753 { 754 using __inner = async_result<_Tp, _Signature>; 755 756 public: 757 using completion_handler_type = 758 executor_binder
; 759 760 using return_type = typename __inner::return_type; 761 762 explicit 763 async_result(completion_handler_type& __h) 764 : _M_target(__h.get()) { } 765 766 async_result(const async_result&) = delete; 767 async_result& operator=(const async_result&) = delete; 768 769 return_type get() { return _M_target.get(); } 770 771 private: 772 __inner _M_target; 773 }; 774 775 template
776 struct associated_allocator
, _ProtoAlloc> 777 { 778 using type = associated_allocator_t<_Tp, _ProtoAlloc>; 779 780 static type 781 get(const executor_binder<_Tp, _Executor>& __b, 782 const _ProtoAlloc& __a = _ProtoAlloc()) noexcept 783 { return associated_allocator<_Tp, _ProtoAlloc>::get(__b.get(), __a); } 784 }; 785 786 template
787 struct associated_executor
, _Executor1> 788 { 789 using type = _Executor; 790 791 static type 792 get(const executor_binder<_Tp, _Executor>& __b, 793 const _Executor1& = _Executor1()) noexcept 794 { return __b.get_executor(); } 795 }; 796 797 template
798 class executor_work_guard 799 { 800 public: 801 // types: 802 803 using executor_type = _Executor; 804 805 // construct / copy / destroy: 806 807 explicit 808 executor_work_guard(const executor_type& __ex) noexcept 809 : _M_ex(__ex), _M_owns(true) 810 { _M_ex.on_work_started(); } 811 812 executor_work_guard(const executor_work_guard& __other) noexcept 813 : _M_ex(__other._M_ex), _M_owns(__other._M_owns) 814 { 815 if (_M_owns) 816 _M_ex.on_work_started(); 817 } 818 819 executor_work_guard(executor_work_guard&& __other) noexcept 820 : _M_ex(__other._M_ex), _M_owns(__other._M_owns) 821 { __other._M_owns = false; } 822 823 executor_work_guard& operator=(const executor_work_guard&) = delete; 824 825 ~executor_work_guard() 826 { 827 if (_M_owns) 828 _M_ex.on_work_finished(); 829 } 830 831 // executor work guard observers: 832 833 executor_type get_executor() const noexcept { return _M_ex; } 834 835 bool owns_work() const noexcept { return _M_owns; } 836 837 // executor work guard modifiers: 838 839 void reset() noexcept 840 { 841 if (_M_owns) 842 _M_ex.on_work_finished(); 843 _M_owns = false; 844 } 845 846 private: 847 _Executor _M_ex; 848 bool _M_owns; 849 }; 850 851 852 class system_context : public execution_context 853 { 854 public: 855 // types: 856 857 using executor_type = system_executor; 858 859 // construct / copy / destroy: 860 861 system_context() = delete; 862 system_context(const system_context&) = delete; 863 system_context& operator=(const system_context&) = delete; 864 865 ~system_context() 866 { 867 stop(); 868 join(); 869 } 870 871 // system_context operations: 872 873 executor_type get_executor() noexcept; 874 875 void stop() 876 { 877 lock_guard
__lock(_M_mtx); 878 _M_stopped = true; 879 _M_cv.notify_all(); 880 } 881 882 bool stopped() const noexcept 883 { 884 lock_guard
__lock(_M_mtx); 885 return _M_stopped; 886 } 887 888 void join() 889 { 890 if (_M_thread.joinable()) 891 _M_thread.join(); 892 } 893 894 private: 895 friend system_executor; 896 897 struct __tag { explicit __tag() = default; }; 898 system_context(__tag) { } 899 900 #ifndef _GLIBCXX_HAS_GTHREADS 901 struct thread 902 { 903 bool joinable() const { return false; } 904 void join() { } 905 }; 906 struct condition_variable 907 { 908 void notify_all() { } 909 }; 910 #endif 911 912 thread _M_thread; 913 mutable mutex_type _M_mtx; // XXX can we reuse base's _M_mutex? 914 condition_variable _M_cv; 915 queue
> _M_tasks; 916 bool _M_stopped = false; 917 918 #ifdef _GLIBCXX_HAS_GTHREADS 919 void 920 _M_run() 921 { 922 while (true) 923 { 924 function
__f; 925 { 926 unique_lock
__lock(_M_mtx); 927 _M_cv.wait(__lock, 928 [this]{ return _M_stopped || !_M_tasks.empty(); }); 929 if (_M_stopped) 930 return; 931 __f = std::move(_M_tasks.front()); 932 _M_tasks.pop(); 933 } 934 __f(); 935 } 936 } 937 #endif 938 939 void 940 _M_post(std::function
__f __attribute__((__unused__))) 941 { 942 lock_guard
__lock(_M_mtx); 943 if (_M_stopped) 944 return; 945 #ifdef _GLIBCXX_HAS_GTHREADS 946 if (!_M_thread.joinable()) 947 _M_thread = std::thread(&system_context::_M_run, this); 948 _M_tasks.push(std::move(__f)); // XXX allocator not used 949 _M_cv.notify_one(); 950 #else 951 __throw_system_error(EOPNOTSUPP); 952 #endif 953 } 954 955 static system_context& 956 _S_get() noexcept 957 { 958 static system_context __sc(__tag{}); 959 return __sc; 960 } 961 }; 962 963 class system_executor 964 { 965 public: 966 // executor operations: 967 968 system_executor() { } 969 970 system_context& 971 context() const noexcept { return system_context::_S_get(); } 972 973 void on_work_started() const noexcept { } 974 void on_work_finished() const noexcept { } 975 976 template
977 void 978 dispatch(_Func&& __f, const _ProtoAlloc& __a) const 979 { decay_t<_Func>{std::forward<_Func>(__f)}(); } 980 981 template
982 void 983 post(_Func&& __f, const _ProtoAlloc&) const // XXX allocator not used 984 { 985 system_context::_S_get()._M_post(std::forward<_Func>(__f)); 986 } 987 988 template
989 void 990 defer(_Func&& __f, const _ProtoAlloc& __a) const 991 { post(std::forward<_Func>(__f), __a); } 992 }; 993 994 inline system_executor 995 system_context::get_executor() noexcept 996 { return {}; } 997 998 class bad_executor : public std::exception 999 { 1000 virtual const char* what() const noexcept { return "bad executor"; } 1001 }; 1002 1003 inline void __throw_bad_executor() // TODO make non-inline 1004 { 1005 #if __cpp_exceptions 1006 throw bad_executor(); 1007 #else 1008 __builtin_abort(); 1009 #endif 1010 } 1011 1012 class executor 1013 { 1014 public: 1015 // construct / copy / destroy: 1016 1017 executor() noexcept = default; 1018 1019 executor(nullptr_t) noexcept { } 1020 executor(const executor&) noexcept = default; 1021 executor(executor&&) noexcept = default; 1022 1023 template
1024 executor(_Executor __e) 1025 : _M_target(make_shared<_Tgt1<_Executor>>(std::move(__e))) 1026 { } 1027 1028 template
1029 executor(allocator_arg_t, const _ProtoAlloc& __a, _Executor __e) 1030 : _M_target(allocate_shared<_Tgt2<_Executor, _ProtoAlloc>>(__a, 1031 std::move(__e), __a)) 1032 { } 1033 1034 executor& operator=(const executor&) noexcept = default; 1035 executor& operator=(executor&&) noexcept = default; 1036 1037 executor& 1038 operator=(nullptr_t) noexcept 1039 { 1040 _M_target = nullptr; 1041 return *this; 1042 } 1043 1044 template
1045 executor& 1046 operator=(_Executor __e) 1047 { 1048 executor(std::move(__e)).swap(*this); 1049 return *this; 1050 } 1051 1052 ~executor() = default; 1053 1054 // executor modifiers: 1055 1056 void 1057 swap(executor& __other) noexcept 1058 { _M_target.swap(__other._M_target); } 1059 1060 template
1061 void 1062 assign(_Executor __e, const _Alloc& __a) 1063 { executor(allocator_arg, __a, std::move(__e)).swap(*this); } 1064 1065 // executor operations: 1066 1067 execution_context& 1068 context() const noexcept 1069 { 1070 __glibcxx_assert( _M_target ); 1071 return _M_target->context(); 1072 } 1073 1074 void 1075 on_work_started() const noexcept 1076 { 1077 __glibcxx_assert( _M_target ); 1078 return _M_target->on_work_started(); 1079 } 1080 1081 void 1082 on_work_finished() const noexcept 1083 { 1084 __glibcxx_assert( _M_target ); 1085 return _M_target->on_work_finished(); 1086 } 1087 1088 template
1089 void 1090 dispatch(_Func&& __f, const _Alloc& __a) const 1091 { 1092 if (!_M_target) 1093 __throw_bad_executor(); 1094 // _M_target->dispatch({allocator_arg, __a, std::forward<_Func>(__f)}); 1095 _M_target->dispatch(std::forward<_Func>(__f)); 1096 } 1097 1098 template
1099 void 1100 post(_Func&& __f, const _Alloc& __a) const 1101 { 1102 if (!_M_target) 1103 __throw_bad_executor(); 1104 // _M_target->post({allocator_arg, __a, std::forward<_Func>(__f)}); 1105 _M_target->post(std::forward<_Func>(__f)); 1106 } 1107 1108 template
1109 void 1110 defer(_Func&& __f, const _Alloc& __a) const 1111 { 1112 if (!_M_target) 1113 __throw_bad_executor(); 1114 // _M_target->defer({allocator_arg, __a, std::forward<_Func>(__f)}); 1115 _M_target->defer(std::forward<_Func>(__f)); 1116 } 1117 1118 // executor capacity: 1119 1120 explicit operator bool() const noexcept 1121 { return static_cast
(_M_target); } 1122 1123 // executor target access: 1124 1125 #if __cpp_rtti 1126 const type_info& 1127 target_type() const noexcept 1128 { 1129 if (_M_target) 1130 return *static_cast
(_M_target->target_type()); 1131 return typeid(void); 1132 } 1133 #endif 1134 1135 template
1136 _Executor* 1137 target() noexcept 1138 { 1139 void* __p = nullptr; 1140 if (_M_target) 1141 { 1142 if (_M_target->_M_func == &_Tgt1
>::_S_func) 1143 __p = _M_target->_M_func(_M_target.get(), nullptr); 1144 #if __cpp_rtti 1145 else 1146 __p = _M_target->target(&typeid(_Executor)); 1147 #endif 1148 } 1149 return static_cast<_Executor*>(__p); 1150 } 1151 1152 template
1153 const _Executor* 1154 target() const noexcept 1155 { 1156 const void* __p = nullptr; 1157 if (_M_target) 1158 { 1159 if (_M_target->_M_func == &_Tgt1
>::_S_func) 1160 return (_Executor*)_M_target->_M_func(_M_target.get(), nullptr); 1161 #if __cpp_rtti 1162 else 1163 __p = _M_target->target(&typeid(_Executor)); 1164 #endif 1165 } 1166 return static_cast
(__p); 1167 } 1168 1169 private: 1170 struct _Tgt 1171 { 1172 virtual void on_work_started() const noexcept = 0; 1173 virtual void on_work_finished() const noexcept = 0; 1174 virtual execution_context& context() const noexcept = 0; 1175 virtual void dispatch(std::function
) const = 0; 1176 virtual void post(std::function
) const = 0; 1177 virtual void defer(std::function
) const = 0; 1178 virtual const void* target_type() const noexcept = 0; 1179 virtual void* target(const void*) noexcept = 0; 1180 virtual bool _M_equals(_Tgt*) const noexcept = 0; 1181 1182 using _Func = void* (_Tgt*, const _Tgt*); 1183 _Func* _M_func; // Provides access to target without RTTI 1184 }; 1185 1186 template
1187 struct _Tgt1 : _Tgt 1188 { 1189 explicit 1190 _Tgt1(_Ex&& __ex) 1191 : _M_ex(std::move(__ex)) 1192 { this->_M_func = &_S_func; } 1193 1194 void 1195 on_work_started() const noexcept override 1196 { _M_ex.on_work_started(); } 1197 1198 void 1199 on_work_finished() const noexcept override 1200 { _M_ex.on_work_finished(); } 1201 1202 execution_context& 1203 context() const noexcept override 1204 { return _M_ex.context(); } 1205 1206 void 1207 dispatch(std::function
__f) const override 1208 { _M_ex.dispatch(std::move(__f), allocator
()); } 1209 1210 void 1211 post(std::function
__f) const override 1212 { _M_ex.post(std::move(__f), allocator
()); } 1213 1214 void 1215 defer(std::function
__f) const override 1216 { _M_ex.defer(std::move(__f), allocator
()); } 1217 1218 const void* 1219 target_type() const noexcept override 1220 { 1221 #if __cpp_rtti 1222 return &typeid(_Ex); 1223 #else 1224 return nullptr; 1225 #endif 1226 } 1227 1228 void* 1229 target(const void* __ti) noexcept override 1230 { 1231 #if __cpp_rtti 1232 if (*static_cast
(__ti) == typeid(_Ex)) 1233 return std::__addressof(_M_ex); 1234 #endif 1235 return nullptr; 1236 } 1237 1238 bool 1239 _M_equals(_Tgt* __tgt) const noexcept override 1240 { 1241 #if __cpp_rtti 1242 if (const void* __p = __tgt->target(&typeid(_Ex))) 1243 return *static_cast
(__p) == _M_ex; 1244 #endif 1245 return false; 1246 } 1247 1248 _Ex _M_ex [[__no_unique_address__]]; 1249 1250 static void* 1251 _S_func(_Tgt* __p, const _Tgt* __q) noexcept 1252 { 1253 auto& __ex = static_cast<_Tgt1*>(__p)->_M_ex; 1254 if (__q) 1255 { 1256 if (__ex == static_cast
(__q)->_M_ex) 1257 return __p; 1258 else 1259 return nullptr; 1260 } 1261 else 1262 return std::__addressof(__ex); 1263 } 1264 }; 1265 1266 template
1267 struct _Tgt2 : _Tgt1<_Ex> 1268 { 1269 explicit 1270 _Tgt2(_Ex&& __ex, const _Alloc& __a) 1271 : _Tgt1<_Ex>(std::move(__ex)), _M_alloc(__a) { } 1272 1273 void 1274 dispatch(std::function
__f) const override 1275 { this->_M_ex.dispatch(std::move(__f), _M_alloc); } 1276 1277 void 1278 post(std::function
__f) const override 1279 { this->_M_ex.post(std::move(__f), _M_alloc); } 1280 1281 void 1282 defer(std::function
__f) const override 1283 { this->_M_ex.defer(std::move(__f), _M_alloc); } 1284 1285 _Alloc _M_alloc [[__no_unique_address__]]; 1286 }; 1287 1288 // Partial specialization for std::allocator
. 1289 // Don't store the allocator. 1290 template
1291 struct _Tgt2<_Ex, std::allocator<_Tp>> : _Tgt1<_Ex> 1292 { }; 1293 1294 friend bool 1295 operator==(const executor& __a, const executor& __b) noexcept 1296 { 1297 _Tgt* __ta = __a._M_target.get(); 1298 _Tgt* __tb = __b._M_target.get(); 1299 if (__ta == __tb) 1300 return true; 1301 if (!__ta || !__tb) 1302 return false; 1303 if (__ta->_M_func == __tb->_M_func) 1304 return __ta->_M_func(__ta, __tb); 1305 return __ta->_M_equals(__tb); 1306 } 1307 1308 shared_ptr<_Tgt> _M_target; 1309 }; 1310 1311 template<> struct is_executor
: true_type { }; 1312 1313 /// executor comparisons 1314 inline bool 1315 operator==(const executor& __e, nullptr_t) noexcept 1316 { return !__e; } 1317 1318 inline bool 1319 operator==(nullptr_t, const executor& __e) noexcept 1320 { return !__e; } 1321 1322 inline bool 1323 operator!=(const executor& __a, const executor& __b) noexcept 1324 { return !(__a == __b); } 1325 1326 inline bool 1327 operator!=(const executor& __e, nullptr_t) noexcept 1328 { return (bool)__e; } 1329 1330 inline bool 1331 operator!=(nullptr_t, const executor& __e) noexcept 1332 { return (bool)__e; } 1333 1334 /// Swap two executor objects. 1335 inline void swap(executor& __a, executor& __b) noexcept { __a.swap(__b); } 1336 1337 1338 template
1339 struct __dispatcher 1340 { 1341 explicit 1342 __dispatcher(_CompletionHandler& __h) 1343 : _M_h(std::move(__h)), _M_w(net::make_work_guard(_M_h)) 1344 { } 1345 1346 void operator()() 1347 { 1348 auto __alloc = net::get_associated_allocator(_M_h); 1349 _M_w.get_executor().dispatch(std::move(_M_h), __alloc); 1350 _M_w.reset(); 1351 } 1352 1353 _CompletionHandler _M_h; 1354 decltype(net::make_work_guard(_M_h)) _M_w; 1355 }; 1356 1357 template
1358 inline __dispatcher<_CompletionHandler> 1359 __make_dispatcher(_CompletionHandler& __h) 1360 { return __dispatcher<_CompletionHandler>{__h}; } 1361 1362 1363 1364 // dispatch: 1365 1366 template
1367 inline __deduced_t<_CompletionToken, void()> 1368 dispatch(_CompletionToken&& __token) 1369 { 1370 async_completion<_CompletionToken, void()> __cmpl{__token}; 1371 auto __ex = net::get_associated_executor(__cmpl.completion_handler); 1372 auto __alloc = net::get_associated_allocator(__cmpl.completion_handler); 1373 __ex.dispatch(std::move(__cmpl.completion_handler), __alloc); 1374 return __cmpl.result.get(); 1375 } 1376 1377 template
1378 inline 1379 enable_if_t
::value, 1380 __deduced_t<_CompletionToken, void()>> 1381 dispatch(const _Executor& __ex, _CompletionToken&& __token) 1382 { 1383 async_completion<_CompletionToken, void()> __cmpl{__token}; 1384 auto __alloc = net::get_associated_allocator(__cmpl.completion_handler); 1385 __ex.dispatch(net::__make_dispatcher(__cmpl.completion_handler), 1386 __alloc); 1387 return __cmpl.result.get(); 1388 } 1389 1390 template
1391 inline 1392 enable_if_t<__is_exec_context<_ExecutionContext>::value, 1393 __deduced_t<_CompletionToken, void()>> 1394 dispatch(_ExecutionContext& __ctx, _CompletionToken&& __token) 1395 { 1396 return net::dispatch(__ctx.get_executor(), 1397 forward<_CompletionToken>(__token)); 1398 } 1399 1400 // post: 1401 1402 template
1403 inline __deduced_t<_CompletionToken, void()> 1404 post(_CompletionToken&& __token) 1405 { 1406 async_completion<_CompletionToken, void()> __cmpl{__token}; 1407 auto __ex = net::get_associated_executor(__cmpl.completion_handler); 1408 auto __alloc = net::get_associated_allocator(__cmpl.completion_handler); 1409 __ex.post(std::move(__cmpl.completion_handler), __alloc); 1410 return __cmpl.result.get(); 1411 } 1412 1413 template
1414 inline 1415 enable_if_t
::value, 1416 __deduced_t<_CompletionToken, void()>> 1417 post(const _Executor& __ex, _CompletionToken&& __token) 1418 { 1419 async_completion<_CompletionToken, void()> __cmpl{__token}; 1420 auto __alloc = net::get_associated_allocator(__cmpl.completion_handler); 1421 __ex.post(net::__make_dispatcher(__cmpl.completion_handler), __alloc); 1422 return __cmpl.result.get(); 1423 } 1424 1425 template
1426 inline 1427 enable_if_t<__is_exec_context<_ExecutionContext>::value, 1428 __deduced_t<_CompletionToken, void()>> 1429 post(_ExecutionContext& __ctx, _CompletionToken&& __token) 1430 { 1431 return net::post(__ctx.get_executor(), 1432 forward<_CompletionToken>(__token)); 1433 } 1434 1435 // defer: 1436 1437 template
1438 inline __deduced_t<_CompletionToken, void()> 1439 defer(_CompletionToken&& __token) 1440 { 1441 async_completion<_CompletionToken, void()> __cmpl{__token}; 1442 auto __ex = net::get_associated_executor(__cmpl.completion_handler); 1443 auto __alloc = net::get_associated_allocator(__cmpl.completion_handler); 1444 __ex.defer(std::move(__cmpl.completion_handler), __alloc); 1445 return __cmpl.result.get(); 1446 } 1447 1448 template
1449 inline 1450 enable_if_t
::value, 1451 __deduced_t<_CompletionToken, void()>> 1452 defer(const _Executor& __ex, _CompletionToken&& __token) 1453 { 1454 async_completion<_CompletionToken, void()> __cmpl{__token}; 1455 auto __alloc = net::get_associated_allocator(__cmpl.completion_handler); 1456 __ex.defer(net::__make_dispatcher(__cmpl.completion_handler), __alloc); 1457 return __cmpl.result.get(); 1458 } 1459 1460 template
1461 inline 1462 enable_if_t<__is_exec_context<_ExecutionContext>::value, 1463 __deduced_t<_CompletionToken, void()>> 1464 defer(_ExecutionContext& __ctx, _CompletionToken&& __token) 1465 { 1466 return net::defer(__ctx.get_executor(), 1467 forward<_CompletionToken>(__token)); 1468 } 1469 1470 1471 template
1472 class strand 1473 { 1474 public: 1475 // types: 1476 1477 using inner_executor_type = _Executor; 1478 1479 // construct / copy / destroy: 1480 1481 strand(); // TODO make state 1482 1483 explicit strand(_Executor __ex) : _M_inner_ex(__ex) { } // TODO make state 1484 1485 template
1486 strand(allocator_arg_t, const _Alloc& __a, _Executor __ex) 1487 : _M_inner_ex(__ex) { } // TODO make state 1488 1489 strand(const strand& __other) noexcept 1490 : _M_state(__other._M_state), _M_inner_ex(__other._M_inner_ex) { } 1491 1492 strand(strand&& __other) noexcept 1493 : _M_state(std::move(__other._M_state)), 1494 _M_inner_ex(std::move(__other._M_inner_ex)) { } 1495 1496 template
1497 strand(const strand<_OtherExecutor>& __other) noexcept 1498 : _M_state(__other._M_state), _M_inner_ex(__other._M_inner_ex) { } 1499 1500 template
1501 strand(strand<_OtherExecutor>&& __other) noexcept 1502 : _M_state(std::move(__other._M_state)), 1503 _M_inner_ex(std::move(__other._M_inner_ex)) { } 1504 1505 strand& 1506 operator=(const strand& __other) noexcept 1507 { 1508 static_assert(is_copy_assignable<_Executor>::value, 1509 "inner executor type must be CopyAssignable"); 1510 1511 // TODO lock __other 1512 // TODO copy state 1513 _M_inner_ex = __other._M_inner_ex; 1514 return *this; 1515 } 1516 1517 strand& 1518 operator=(strand&& __other) noexcept 1519 { 1520 static_assert(is_move_assignable<_Executor>::value, 1521 "inner executor type must be MoveAssignable"); 1522 1523 // TODO move state 1524 _M_inner_ex = std::move(__other._M_inner_ex); 1525 return *this; 1526 } 1527 1528 template
1529 strand& 1530 operator=(const strand<_OtherExecutor>& __other) noexcept 1531 { 1532 static_assert(is_convertible<_OtherExecutor, _Executor>::value, 1533 "inner executor type must be compatible"); 1534 1535 // TODO lock __other 1536 // TODO copy state 1537 _M_inner_ex = __other._M_inner_ex; 1538 return *this; 1539 } 1540 1541 template
1542 strand& 1543 operator=(strand<_OtherExecutor>&& __other) noexcept 1544 { 1545 static_assert(is_convertible<_OtherExecutor, _Executor>::value, 1546 "inner executor type must be compatible"); 1547 1548 // TODO move state 1549 _M_inner_ex = std::move(__other._M_inner_ex); 1550 return *this; 1551 } 1552 1553 ~strand() 1554 { 1555 // the task queue outlives this object if non-empty 1556 // TODO create circular ref in queue? 1557 } 1558 1559 // strand operations: 1560 1561 inner_executor_type 1562 get_inner_executor() const noexcept 1563 { return _M_inner_ex; } 1564 1565 bool 1566 running_in_this_thread() const noexcept 1567 { return _M_state->running_in_this_thread(); } 1568 1569 execution_context& 1570 context() const noexcept 1571 { return _M_inner_ex.context(); } 1572 1573 void on_work_started() const noexcept { _M_inner_ex.on_work_started(); } 1574 void on_work_finished() const noexcept { _M_inner_ex.on_work_finished(); } 1575 1576 template
1577 void 1578 dispatch(_Func&& __f, const _Alloc& __a) const 1579 { 1580 if (running_in_this_thread()) 1581 decay_t<_Func>{std::forward<_Func>(__f)}(); 1582 else 1583 post(std::forward<_Func>(__f), __a); 1584 } 1585 1586 template
1587 void 1588 post(_Func&& __f, const _Alloc& __a) const; // TODO 1589 1590 template
1591 void 1592 defer(_Func&& __f, const _Alloc& __a) const 1593 { post(std::forward<_Func>(__f), __a); } 1594 1595 private: 1596 friend bool 1597 operator==(const strand& __a, const strand& __b) 1598 { return __a._M_state == __b._M_state; } 1599 1600 // TODO add synchronised queue 1601 struct _State 1602 { 1603 #if defined(_GLIBCXX_HAS_GTHREADS) 1604 bool 1605 running_in_this_thread() const noexcept 1606 { return std::this_thread::get_id() == _M_running_on; } 1607 1608 std::thread::id _M_running_on; 1609 #else 1610 bool running_in_this_thread() const { return true; } 1611 #endif 1612 }; 1613 shared_ptr<_State> _M_state; 1614 _Executor _M_inner_ex; 1615 }; 1616 1617 #if defined(_GLIBCXX_HAS_GTHREADS) 1618 1619 // Completion token for asynchronous operations initiated with use_future. 1620 template
1621 struct __use_future_ct 1622 { 1623 std::tuple<_Func, _Alloc> _M_t; 1624 }; 1625 1626 template
1627 struct __use_future_ct<_Func, std::allocator<_Tp>> 1628 { 1629 _Func _M_f; 1630 }; 1631 1632 template
> 1633 class use_future_t 1634 { 1635 public: 1636 // use_future_t types: 1637 using allocator_type = _ProtoAllocator; 1638 1639 // use_future_t members: 1640 constexpr 1641 use_future_t() 1642 noexcept(is_nothrow_default_constructible<_ProtoAllocator>::value) 1643 : _M_alloc() { } 1644 1645 explicit 1646 use_future_t(const _ProtoAllocator& __a) noexcept : _M_alloc(__a) { } 1647 1648 template
1649 use_future_t<_OtherAllocator> 1650 rebind(const _OtherAllocator& __a) const noexcept 1651 { return use_future_t<_OtherAllocator>(__a); } 1652 1653 allocator_type get_allocator() const noexcept { return _M_alloc; } 1654 1655 template
1656 auto 1657 operator()(_Func&& __f) const 1658 { 1659 using _Token = __use_future_ct
, _ProtoAllocator>; 1660 return _Token{ {std::forward<_Func>(__f), _M_alloc} }; 1661 } 1662 1663 private: 1664 _ProtoAllocator _M_alloc; 1665 }; 1666 1667 template
1668 class use_future_t
> 1669 { 1670 public: 1671 // use_future_t types: 1672 using allocator_type = std::allocator<_Tp>; 1673 1674 // use_future_t members: 1675 constexpr use_future_t() noexcept = default; 1676 1677 explicit 1678 use_future_t(const allocator_type& __a) noexcept { } 1679 1680 template
1681 use_future_t
> 1682 rebind(const std::allocator<_Up>& __a) const noexcept 1683 { return use_future_t
>(__a); } 1684 1685 allocator_type get_allocator() const noexcept { return {}; } 1686 1687 template
1688 auto 1689 operator()(_Func&& __f) const 1690 { 1691 using _Token = __use_future_ct
, allocator_type>; 1692 return _Token{std::forward<_Func>(__f)}; 1693 } 1694 }; 1695 1696 constexpr use_future_t<> use_future = use_future_t<>(); 1697 1698 template
1699 class async_result<__use_future_ct<_Func, _Alloc>, _Res(_Args...)>; 1700 1701 template
1702 struct __use_future_ex; 1703 1704 // Completion handler for asynchronous operations initiated with use_future. 1705 template
1706 struct __use_future_ch 1707 { 1708 template
1709 explicit 1710 __use_future_ch(__use_future_ct<_Func, _Alloc>&& __token) 1711 : _M_f{ std::move(std::get<0>(__token._M_t)) }, 1712 _M_promise{ std::get<1>(__token._M_t) } 1713 { } 1714 1715 template
1716 explicit 1717 __use_future_ch(__use_future_ct<_Func, std::allocator<_Tp>>&& __token) 1718 : _M_f{ std::move(__token._M_f) } 1719 { } 1720 1721 void 1722 operator()(_Args&&... __args) 1723 { 1724 __try 1725 { 1726 _M_promise.set_value(_M_f(std::forward<_Args>(__args)...)); 1727 } 1728 __catch(__cxxabiv1::__forced_unwind&) 1729 { 1730 __throw_exception_again; 1731 } 1732 __catch(...) 1733 { 1734 _M_promise.set_exception(std::current_exception()); 1735 } 1736 } 1737 1738 using __result = result_of_t<_Func(decay_t<_Args>...)>; 1739 1740 future<__result> get_future() { return _M_promise.get_future(); } 1741 1742 private: 1743 template
1744 friend struct __use_future_ex; 1745 1746 _Func _M_f; 1747 mutable promise<__result> _M_promise; 1748 }; 1749 1750 // Specialization of async_result for operations initiated with use_future. 1751 template
1752 class async_result<__use_future_ct<_Func, _Alloc>, _Res(_Args...)> 1753 { 1754 public: 1755 using completion_handler_type = __use_future_ch<_Func, _Args...>; 1756 using return_type = future
; 1757 1758 explicit 1759 async_result(completion_handler_type& __h) 1760 : _M_future(__h.get_future()) 1761 { } 1762 1763 async_result(const async_result&) = delete; 1764 async_result& operator=(const async_result&) = delete; 1765 1766 return_type get() { return std::move(_M_future); } 1767 1768 private: 1769 return_type _M_future; 1770 }; 1771 1772 template
1773 struct __use_future_ex 1774 { 1775 template
1776 __use_future_ex(const _Handler& __h, _Executor __ex) 1777 : _M_t(__h._M_promise, __ex) 1778 { } 1779 1780 template
1781 void 1782 dispatch(_Fn&& __fn) 1783 { 1784 __try 1785 { 1786 std::get<1>(_M_t).dispatch(std::forward<_Fn>(__fn)); 1787 } 1788 __catch(__cxxabiv1::__forced_unwind&) 1789 { 1790 __throw_exception_again; 1791 } 1792 __catch(...) 1793 { 1794 std::get<0>(_M_t).set_exception(std::current_exception()); 1795 } 1796 } 1797 1798 template
1799 void 1800 post(_Fn&& __fn) 1801 { 1802 __try 1803 { 1804 std::get<1>(_M_t).post(std::forward<_Fn>(__fn)); 1805 } 1806 __catch(__cxxabiv1::__forced_unwind&) 1807 { 1808 __throw_exception_again; 1809 } 1810 __catch(...) 1811 { 1812 std::get<0>(_M_t).set_exception(std::current_exception()); 1813 } 1814 } 1815 1816 template
1817 void 1818 defer(_Fn&& __fn) 1819 { 1820 __try 1821 { 1822 std::get<1>(_M_t).defer(std::forward<_Fn>(__fn)); 1823 } 1824 __catch(__cxxabiv1::__forced_unwind&) 1825 { 1826 __throw_exception_again; 1827 } 1828 __catch(...) 1829 { 1830 std::get<0>(_M_t).set_exception(std::current_exception()); 1831 } 1832 } 1833 1834 private: 1835 tuple
&, _Executor> _M_t; 1836 }; 1837 1838 template
1839 struct associated_executor<__use_future_ch<_Func, _Args...>, _Executor> 1840 { 1841 private: 1842 using __handler = __use_future_ch<_Func, _Args...>; 1843 1844 using type = __use_future_ex
; 1845 1846 static type 1847 get(const __handler& __h, const _Executor& __ex) 1848 { return { __h, __ex }; } 1849 }; 1850 1851 #if 0 1852 1853 // [async.use.future.traits] 1854 template
1855 class handler_type
, _Ret(_Args...)> // TODO uglify name 1856 { 1857 template
1858 struct __is_error_result : false_type { }; 1859 1860 template
1861 struct __is_error_result
: true_type { }; 1862 1863 template
1864 struct __is_error_result
: true_type { }; 1865 1866 static exception_ptr 1867 _S_exptr(exception_ptr& __ex) 1868 { return std::move(__ex); } 1869 1870 static exception_ptr 1871 _S_exptr(const error_code& __ec) 1872 { return make_exception_ptr(system_error(__ec)); } 1873 1874 template
1875 struct _Type; 1876 1877 // N == 0 1878 template
1879 struct _Type<_IsError> 1880 { 1881 std::promise
_M_promise; 1882 1883 void 1884 operator()() 1885 { 1886 _M_promise.set_value(); 1887 } 1888 }; 1889 1890 // N == 1, U0 is error_code or exception_ptr 1891 template
1892 struct _Type
1893 { 1894 std::promise
_M_promise; 1895 1896 template
1897 void 1898 operator()(_Arg0&& __a0) 1899 { 1900 if (__a0) 1901 _M_promise.set_exception(_S_exptr(__a0)); 1902 else 1903 _M_promise.set_value(); 1904 } 1905 }; 1906 1907 // N == 1, U0 is not error_code or exception_ptr 1908 template
1909 struct _Type
1910 { 1911 std::promise<_UArg0> _M_promise; 1912 1913 template
1914 void 1915 operator()(_Arg0&& __a0) 1916 { 1917 _M_promise.set_value(std::forward<_Arg0>(__a0)); 1918 } 1919 }; 1920 1921 // N == 2, U0 is error_code or exception_ptr 1922 template
1923 struct _Type
1924 { 1925 std::promise<_UArg1> _M_promise; 1926 1927 template
1928 void 1929 operator()(_Arg0&& __a0, _Arg1&& __a1) 1930 { 1931 if (__a0) 1932 _M_promise.set_exception(_S_exptr(__a0)); 1933 else 1934 _M_promise.set_value(std::forward<_Arg1>(__a1)); 1935 } 1936 }; 1937 1938 // N >= 2, U0 is not error_code or exception_ptr 1939 template
1940 struct _Type
1941 { 1942 static_assert(sizeof...(_UArgs) > 1, "wrong partial specialization"); 1943 1944 std::promise
> _M_promise; 1945 1946 template
1947 void 1948 operator()(_Args&&... __args) 1949 { 1950 _M_promise.set_value( 1951 std::forward_as_tuple(std::forward<_Args>(__args)...)); 1952 } 1953 }; 1954 1955 // N > 2, U0 is error_code or exception_ptr 1956 template
1957 struct _Type
1958 { 1959 static_assert(sizeof...(_UArgs) > 1, "wrong partial specialization"); 1960 1961 std::promise
> _M_promise; 1962 1963 template
1964 void 1965 operator()(_Arg0&& __a0, _Args&&... __args) 1966 { 1967 if (__a0) 1968 _M_promise.set_exception(_S_exptr(__a0)); 1969 else 1970 _M_promise.set_value( 1971 std::forward_as_tuple(std::forward<_Args>(__args)...)); 1972 } 1973 }; 1974 1975 public: 1976 using type = 1977 _Type<__is_error_result<_Args...>::value, decay_t<_Args>...>; 1978 }; 1979 1980 1981 template
1982 struct async_result
, _Ret(_Args...)> 1983 { 1984 using completion_handler_type 1985 = typename handler_type
, _Ret(_Args...)>::type; 1986 1987 using return_type = void; // XXX TODO ???; 1988 1989 explicit 1990 async_result(completion_handler_type& __h) : _M_handler(__h) { } 1991 1992 auto get() { return _M_handler._M_provider.get_future(); } 1993 1994 async_result(const async_result&) = delete; 1995 async_result& operator=(const async_result&) = delete; 1996 1997 return_type get() { return _M_handler._M_promise.get_future(); } 1998 1999 private: 2000 completion_handler_type& _M_handler; 2001 }; 2002 2003 // TODO specialize associated_executor for 2004 // async_result
, Sig>::completion_handler_type 2005 // to use a __use_future_ex 2006 // (probably need to move _Type outside of handler_type so we don't have 2007 // a non-deduced context) 2008 2009 #endif 2010 2011 // [async.packaged.task.specializations] 2012 template
2013 class async_result
, _Signature> 2014 { 2015 public: 2016 using completion_handler_type = packaged_task<_Ret(_Args...)>; 2017 using return_type = future<_Ret>; 2018 2019 explicit 2020 async_result(completion_handler_type& __h) 2021 : _M_future(__h.get_future()) { } 2022 2023 async_result(const async_result&) = delete; 2024 async_result& operator=(const async_result&) = delete; 2025 2026 return_type get() { return std::move(_M_future); } 2027 2028 private: 2029 return_type _M_future; 2030 }; 2031 2032 #endif // _GLIBCXX_HAS_GTHREADS 2033 2034 /// @} 2035 2036 } // namespace v1 2037 } // namespace net 2038 } // namespace experimental 2039 2040 template
2041 struct uses_allocator
2042 : true_type {}; 2043 2044 _GLIBCXX_END_NAMESPACE_VERSION 2045 } // namespace std 2046 2047 #endif // C++14 2048 2049 #endif // _GLIBCXX_EXPERIMENTAL_EXECUTOR
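
The listing above only shows the library's declarations and inline definitions; a short, self-contained program makes it easier to see how the main pieces fit together. The sketch below is not part of the header or of this page. It assumes GCC 11 or later with thread support and a compile line such as g++ -std=c++17 -pthread executor_demo.cc; the file name and the promise/future pair used to observe completion are illustrative additions, not part of the TS interface.

// executor_demo.cc -- minimal usage sketch for <experimental/executor>.
#include <experimental/executor>
#include <future>
#include <iostream>
#include <thread>

namespace net = std::experimental::net;

int main()
{
  // dispatch(): with no executor argument the handler is associated with the
  // system executor, whose dispatch() simply invokes it on the calling thread.
  net::dispatch([]{ std::cout << "dispatched inline\n"; });

  // post(): the handler is queued on the singleton system_context and runs on
  // its internal worker thread.  The promise/future pair is only here so that
  // main() can wait for the handler to finish.
  std::promise<std::thread::id> ran;
  std::future<std::thread::id> ran_on = ran.get_future();
  net::post([&ran]{ ran.set_value(std::this_thread::get_id()); });
  std::cout << "posted handler ran on thread " << ran_on.get() << '\n';

  // A work guard marks its executor as having outstanding work until reset()
  // is called or the guard is destroyed.
  auto guard = net::make_work_guard(net::system_executor{});
  guard.reset();
}

Because system_executor::dispatch constructs and invokes the handler directly, the first message is printed from the calling thread, while post() always hands the handler to the system_context's background thread, which is started lazily on first use and joined when the context is destroyed.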