// <generator> -*- C++ -*-

// Copyright (C) 2023-2024 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library.  This library is free
// software; you can redistribute it and/or modify it under the
// terms of the GNU General Public License as published by the
// Free Software Foundation; either version 3, or (at your option)
// any later version.

// This library is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
// GNU General Public License for more details.

// Under Section 7 of GPL version 3, you are granted additional
// permissions described in the GCC Runtime Library Exception, version
// 3.1, as published by the Free Software Foundation.

// You should have received a copy of the GNU General Public License and
// a copy of the GCC Runtime Library Exception along with this program;
// see the files COPYING3 and COPYING.RUNTIME respectively.  If not, see
// <http://www.gnu.org/licenses/>.

/** @file include/generator
 *  This is a Standard C++ Library header.
 */

#ifndef _GLIBCXX_GENERATOR
#define _GLIBCXX_GENERATOR

#include <ranges>
#pragma GCC system_header

#include <bits/c++config.h>

#define __glibcxx_want_generator
#include <bits/version.h>

#ifdef __cpp_lib_generator // C++ >= 23 && __glibcxx_coroutine
#include <new>
#include <bits/move.h>
#include <bits/ranges_util.h>
#include <bits/elements_of.h>
#include <bits/uses_allocator.h>
#include <bits/exception_ptr.h>
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <coroutine>

#include <type_traits>
#include <variant>
#include <concepts>

#if _GLIBCXX_HOSTED
# include <bits/memory_resource.h>
#endif // HOSTED

namespace std _GLIBCXX_VISIBILITY(default)
{
_GLIBCXX_BEGIN_NAMESPACE_VERSION

  /**
   * @defgroup generator_coros Range generator coroutines
   * @addtogroup ranges
   * @since C++23
   * @{
   */

  /** @brief A range specified using a yielding coroutine.
   *
   * `std::generator` is a utility class for defining ranges using coroutines
   * that yield elements as a range.  Generator coroutines are synchronous.
   *
   * @headerfile generator
   * @since C++23
   */
  template<typename _Ref, typename _V = void, typename _Alloc = void>
    class generator;
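  // Illustrative sketch only (hypothetical user code, not part of this
  // header): a generator coroutine produces its elements lazily and is
  // consumed like any other input range.
  //
  //   std::generator<int> iota(int start)
  //   {
  //     while (true)
  //       co_yield start++;
  //   }
  //
  //   for (int x : iota(0) | std::views::take(3))
  //     ;  // visits 0, 1, 2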

  /// @cond undocumented
  namespace __gen
  {
    /// _Reference type for a generator whose reference (first argument) and
    /// value (second argument) types are _Ref and _Val.
    template<typename _Ref, typename _Val>
      using _Reference_t = __conditional_t<is_void_v<_Val>,
                                           _Ref&&, _Ref>;

    /// Type yielded by a generator whose _Reference type is _Reference.
    template<typename _Reference>
      using _Yield_t = __conditional_t<is_reference_v<_Reference>,
                                       _Reference,
                                       const _Reference&>;
    /// _Yield_t composed with _Reference_t.
    template<typename _Ref, typename _Val>
      using _Yield2_t = _Yield_t<_Reference_t<_Ref, _Val>>;

    template<typename> constexpr bool __is_generator = false;
    template<typename _Ref, typename _Val, typename _Alloc>
      constexpr bool __is_generator<std::generator<_Ref, _Val, _Alloc>> = true;

    /// Allocator- and value-type-erased generator promise type.
    /// \tparam _Yielded The corresponding generator's yielded type.
    template<typename _Yielded>
      class _Promise_erased
      {
        static_assert(is_reference_v<_Yielded>);
        using _Yielded_deref = remove_reference_t<_Yielded>;
        using _Yielded_decvref = remove_cvref_t<_Yielded>;
        using _ValuePtr = add_pointer_t<_Yielded>;
        using _Coro_handle = std::coroutine_handle<_Promise_erased>;

        template<typename, typename, typename>
          friend class std::generator;

        template<typename _Gen>
          struct _Recursive_awaiter;
        template<typename>
          friend struct _Recursive_awaiter;
        struct _Copy_awaiter;
        struct _Subyield_state;
        struct _Final_awaiter;
      public:
        suspend_always
        initial_suspend() const noexcept
        { return {}; }

        suspend_always
        yield_value(_Yielded __val) noexcept
        {
          _M_bottom_value() = ::std::addressof(__val);
          return {};
        }

        auto
        yield_value(const _Yielded_deref& __val)
          noexcept (is_nothrow_constructible_v<_Yielded_decvref,
                                               const _Yielded_deref&>)
          requires (is_rvalue_reference_v<_Yielded>
                    && constructible_from<_Yielded_decvref,
                                          const _Yielded_deref&>)
        { return _Copy_awaiter(__val, _M_bottom_value()); }

        template<typename _R2, typename _V2, typename _A2, typename _U2>
          requires std::same_as<_Yield2_t<_R2, _V2>, _Yielded>
          auto
          yield_value(ranges::elements_of<generator<_R2, _V2, _A2>&&, _U2> __r)
            noexcept
          { return _Recursive_awaiter { std::move(__r.range) }; }

        template<ranges::input_range _R, typename _Alloc>
          requires convertible_to<ranges::range_reference_t<_R>, _Yielded>
          auto
          yield_value(ranges::elements_of<_R, _Alloc> __r)
            noexcept
          {
            auto __n = [] (allocator_arg_t, _Alloc,
                           ranges::iterator_t<_R> __i,
                           ranges::sentinel_t<_R> __s)
              -> generator<_Yielded, ranges::range_value_t<_R>, _Alloc> {
                for (; __i != __s; ++__i)
                  co_yield static_cast<_Yielded>(*__i);
              };
            return yield_value(ranges::elements_of(__n(allocator_arg,
                                                       __r.allocator,
                                                       ranges::begin(__r.range),
                                                       ranges::end(__r.range))));
          }
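
        // Illustrative sketch (hypothetical user code) of the elements_of
        // overloads above: a generator can splice in every element of a
        // nested generator (or of another input range whose references are
        // convertible to its yielded type) with a single co_yield.
        //
        //   std::generator<int> inner()
        //   { co_yield 1; co_yield 2; }
        //
        //   std::generator<int> outer()
        //   {
        //     co_yield 0;
        //     co_yield std::ranges::elements_of(inner());  // yields 1, 2
        //   }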

        _Final_awaiter
        final_suspend() noexcept
        { return {}; }

        void
        unhandled_exception()
        {
          // To get to this point, this coroutine must have been active.  In
          // that case, it must be the top of the stack.  The current coroutine
          // is the sole entry of the stack iff it is both the top and the
          // bottom.  Since it is implicitly the top in this context, it is the
          // sole entry iff it is also the bottom.
          if (_M_nest._M_is_bottom())
            throw;
          else
            this->_M_except = std::current_exception();
        }

        void await_transform() = delete;
        void return_void() const noexcept {}

      private:
        _ValuePtr&
        _M_bottom_value() noexcept
        { return _M_nest._M_bottom_value(*this); }

        _ValuePtr&
        _M_value() noexcept
        { return _M_nest._M_value(*this); }

        _Subyield_state _M_nest;
        std::exception_ptr _M_except;
      };

    template<typename _Yielded>
      struct _Promise_erased<_Yielded>::_Subyield_state
      {
        struct _Frame
        {
          _Coro_handle _M_bottom;
          _Coro_handle _M_parent;
        };

        struct _Bottom_frame
        {
          _Coro_handle _M_top;
          _ValuePtr _M_value = nullptr;
        };

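        // The coroutine at the bottom of a generator stack stores a
        // _Bottom_frame: the handle of the current top-of-stack coroutine
        // plus the slot through which the innermost yielded value's address
        // is published.  Every nested (sub-yielded) coroutine instead stores
        // a _Frame: handles to the stack's bottom and to its parent.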
        std::variant<
          _Bottom_frame,
          _Frame
        > _M_stack;

        bool
        _M_is_bottom() const noexcept
        { return !std::holds_alternative<_Frame>(this->_M_stack); }

        _Coro_handle&
        _M_top() noexcept
        {
          if (auto __f = std::get_if<_Frame>(&this->_M_stack))
            return __f->_M_bottom.promise()._M_nest._M_top();

          auto __bf = std::get_if<_Bottom_frame>(&this->_M_stack);
          __glibcxx_assert(__bf);
          return __bf->_M_top;
        }

        void
        _M_push(_Coro_handle __current, _Coro_handle __subyield) noexcept
        {
          __glibcxx_assert(&__current.promise()._M_nest == this);
          __glibcxx_assert(this->_M_top() == __current);

          __subyield.promise()._M_nest._M_jump_in(__current, __subyield);
        }

        std::coroutine_handle<>
        _M_pop() noexcept
        {
          if (auto __f = std::get_if<_Frame>(&this->_M_stack))
            {
              // We aren't a bottom coroutine.  Restore the parent to the top
              // and resume.
              auto __p = this->_M_top() = __f->_M_parent;
              return __p;
            }
          else
            // Otherwise, there's nothing to resume.
            return std::noop_coroutine();
        }

        void
        _M_jump_in(_Coro_handle __rest, _Coro_handle __new) noexcept
        {
          __glibcxx_assert(&__new.promise()._M_nest == this);
          __glibcxx_assert(this->_M_is_bottom());
          // We're bottom.  We're also top if top is unset (note that this is
          // not true if something was added to the coro stack and then popped,
          // but in that case we can't possibly be yielded from, as it would
          // require rerunning begin()).
          __glibcxx_assert(!this->_M_top());

          auto& __rn = __rest.promise()._M_nest;
          __rn._M_top() = __new;

          // Presume we're the second frame...
          auto __bott = __rest;
          if (auto __f = std::get_if<_Frame>(&__rn._M_stack))
            // But, if we aren't, get the actual bottom.  We're only the second
            // frame if our parent is the bottom frame, i.e. it doesn't have a
            // _Frame member.
            __bott = __f->_M_bottom;

          this->_M_stack = _Frame {
            ._M_bottom = __bott,
            ._M_parent = __rest
          };
        }

        _ValuePtr&
        _M_bottom_value(_Promise_erased& __current) noexcept
        {
          __glibcxx_assert(&__current._M_nest == this);
          if (auto __bf = std::get_if<_Bottom_frame>(&this->_M_stack))
            return __bf->_M_value;
          auto __f = std::get_if<_Frame>(&this->_M_stack);
          __glibcxx_assert(__f);
          auto& __p = __f->_M_bottom.promise();
          return __p._M_nest._M_value(__p);
        }

        _ValuePtr&
        _M_value(_Promise_erased& __current) noexcept
        {
          __glibcxx_assert(&__current._M_nest == this);
          auto __bf = std::get_if<_Bottom_frame>(&this->_M_stack);
          __glibcxx_assert(__bf);
          return __bf->_M_value;
        }
      };

    template<typename _Yielded>
      struct _Promise_erased<_Yielded>::_Final_awaiter
      {
        bool await_ready() noexcept
        { return false; }

        template<typename _Promise>
          auto await_suspend(std::coroutine_handle<_Promise> __c) noexcept
          {
            static_assert(is_pointer_interconvertible_base_of_v<
                            _Promise_erased, _Promise>);

            auto& __n = __c.promise()._M_nest;
            return __n._M_pop();
          }

        void await_resume() noexcept {}
      };

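    // Awaiter returned by the copying yield_value overload above: it stores
    // a copy of the yielded lvalue inside the awaiter (and therefore inside
    // the coroutine frame) and publishes that copy's address as the bottom
    // value for the duration of the suspension.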
    template<typename _Yielded>
      struct _Promise_erased<_Yielded>::_Copy_awaiter
      {
        _Yielded_decvref _M_value;
        _ValuePtr& _M_bottom_value;

        constexpr bool await_ready() noexcept
        { return false; }

        template<typename _Promise>
          void await_suspend(std::coroutine_handle<_Promise>) noexcept
          {
            static_assert(is_pointer_interconvertible_base_of_v<
                            _Promise_erased, _Promise>);
            _M_bottom_value = ::std::addressof(_M_value);
          }

        constexpr void
        await_resume() const noexcept
        {}
      };

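    // Awaiter used when yielding elements_of a nested generator: suspending
    // pushes the nested coroutine onto this generator's stack and transfers
    // control to it directly; resuming rethrows any exception the nested
    // coroutine captured.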
    template<typename _Yielded>
      template<typename _Gen>
        struct _Promise_erased<_Yielded>::_Recursive_awaiter
        {
          _Gen _M_gen;
          static_assert(__is_generator<_Gen>);
          static_assert(std::same_as<typename _Gen::yielded, _Yielded>);

          _Recursive_awaiter(_Gen __gen) noexcept
            : _M_gen(std::move(__gen))
          { this->_M_gen._M_mark_as_started(); }

          constexpr bool
          await_ready() const noexcept
          { return false; }

          template<typename _Promise>
            std::coroutine_handle<>
            await_suspend(std::coroutine_handle<_Promise> __p) noexcept
            {
              static_assert(is_pointer_interconvertible_base_of_v<
                              _Promise_erased, _Promise>);

              auto __c = _Coro_handle::from_address(__p.address());
              auto __t = _Coro_handle::from_address(this->_M_gen._M_coro.address());
              __p.promise()._M_nest._M_push(__c, __t);
              return __t;
            }

          void await_resume()
          {
            if (auto __e = _M_gen._M_coro.promise()._M_except)
              std::rethrow_exception(__e);
          }
        };

    struct _Alloc_block
    {
      alignas(__STDCPP_DEFAULT_NEW_ALIGNMENT__)
      char _M_data[__STDCPP_DEFAULT_NEW_ALIGNMENT__];

      static auto
      _M_cnt(std::size_t __sz) noexcept
      {
        auto __blksz = sizeof(_Alloc_block);
        return (__sz + __blksz - 1) / __blksz;
      }
    };

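    // An always-equal, default-initializable allocator never needs to be
    // stored in the coroutine frame: an equivalent allocator can simply be
    // default-constructed again when the frame is deallocated.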
    template<typename _A>
      concept _Stateless_alloc = (allocator_traits<_A>::is_always_equal::value
                                  && default_initializable<_A>);

    template<typename _Alloc>
      class _Promise_alloc
      {
        using _ATr = allocator_traits<_Alloc>;
        using _Rebound = typename _ATr::template rebind_alloc<_Alloc_block>;
        using _Rebound_ATr = typename _ATr
                             ::template rebind_traits<_Alloc_block>;
        static_assert(is_pointer_v<typename _Rebound_ATr::pointer>,
                      "Must use allocators for true pointers with generators");

        static auto
        _M_alloc_address(std::uintptr_t __fn, std::uintptr_t __fsz) noexcept
        {
          auto __an = __fn + __fsz;
          auto __ba = alignof(_Rebound);
          return reinterpret_cast<_Rebound*>(((__an + __ba - 1) / __ba) * __ba);
        }

        static auto
        _M_alloc_size(std::size_t __csz) noexcept
        {
          auto __ba = alignof(_Rebound);
          // Our desired layout is placing the coroutine frame, then pad out to
          // align, then place the allocator.  The total size of that is the
          // size of the coroutine frame, plus up to __ba bytes, plus the size
          // of the allocator.
          return __csz + __ba + sizeof(_Rebound);
        }
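
        // Resulting frame layout for a stateful allocator:
        //   [ coroutine frame (__csz bytes) | padding | _Rebound allocator ]
        // For a stateless allocator only the coroutine frame is allocated.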

        static void*
        _M_allocate(_Rebound __b, std::size_t __csz)
        {
          if constexpr (_Stateless_alloc<_Rebound>)
            // Only need room for the coroutine.
            return __b.allocate(_Alloc_block::_M_cnt(__csz));
          else
            {
              auto __nsz = _Alloc_block::_M_cnt(_M_alloc_size(__csz));
              auto __f = __b.allocate(__nsz);
              auto __fn = reinterpret_cast<std::uintptr_t>(__f);
              auto __an = _M_alloc_address(__fn, __csz);
              ::new (__an) _Rebound(std::move(__b));
              return __f;
            }
        }

      public:
        void*
        operator new(std::size_t __sz)
          requires default_initializable<_Rebound> // _Alloc is non-void
        { return _M_allocate({}, __sz); }

        template<typename _Na, typename... _Args>
          void*
          operator new(std::size_t __sz,
                       allocator_arg_t, const _Na& __na,
                       const _Args&...)
            requires convertible_to<const _Na&, _Alloc>
          {
            return _M_allocate(static_cast<_Rebound>(static_cast<_Alloc>(__na)),
                               __sz);
          }

        template<typename _This, typename _Na, typename... _Args>
          void*
          operator new(std::size_t __sz,
                       const _This&,
                       allocator_arg_t, const _Na& __na,
                       const _Args&...)
            requires convertible_to<const _Na&, _Alloc>
          {
            return _M_allocate(static_cast<_Rebound>(static_cast<_Alloc>(__na)),
                               __sz);
          }

        void
        operator delete(void* __ptr, std::size_t __csz) noexcept
        {
          if constexpr (_Stateless_alloc<_Rebound>)
            {
              _Rebound __b;
              return __b.deallocate(reinterpret_cast<_Alloc_block*>(__ptr),
                                    _Alloc_block::_M_cnt(__csz));
            }
          else
            {
              auto __nsz = _Alloc_block::_M_cnt(_M_alloc_size(__csz));
              auto __fn = reinterpret_cast<std::uintptr_t>(__ptr);
              auto __an = _M_alloc_address(__fn, __csz);
              _Rebound __b(std::move(*__an));
              __an->~_Rebound();
              __b.deallocate(reinterpret_cast<_Alloc_block*>(__ptr), __nsz);
            }
        }
      };

    template<>
      class _Promise_alloc<void>
      {
        using _Dealloc_fn = void (*)(void*, std::size_t);

        static auto
        _M_dealloc_address(std::uintptr_t __fn, std::uintptr_t __fsz) noexcept
        {
          auto __an = __fn + __fsz;
          auto __ba = alignof(_Dealloc_fn);
          auto __aligned = ((__an + __ba - 1) / __ba) * __ba;
          return reinterpret_cast<_Dealloc_fn*>(__aligned);
        }

        template<typename _Rebound>
          static auto
          _M_alloc_address(std::uintptr_t __fn, std::uintptr_t __fsz) noexcept
            requires (!_Stateless_alloc<_Rebound>)
          {
            auto __ba = alignof(_Rebound);
            auto __da = _M_dealloc_address(__fn, __fsz);
            auto __aan = reinterpret_cast<std::uintptr_t>(__da);
            __aan += sizeof(_Dealloc_fn);
            auto __aligned = ((__aan + __ba - 1) / __ba) * __ba;
            return reinterpret_cast<_Rebound*>(__aligned);
          }

        template<typename _Rebound>
          static auto
          _M_alloc_size(std::size_t __csz) noexcept
          {
            // This time, we want the coroutine frame, then the deallocator
            // pointer, then the allocator itself, if any.
            std::size_t __aa = 0;
            std::size_t __as = 0;
            if constexpr (!std::same_as<_Rebound, void>)
              {
                __aa = alignof(_Rebound);
                __as = sizeof(_Rebound);
              }
            auto __ba = __aa + alignof(_Dealloc_fn);
            return __csz + __ba + __as + sizeof(_Dealloc_fn);
          }

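        // Resulting frame layout for the type-erased allocator case:
        //   [ coroutine frame | padding | _Dealloc_fn | padding | _Rebound ]
        // The trailing allocator copy is omitted when _Rebound is stateless.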
        template<typename _Rebound>
          static void
          _M_deallocator(void* __ptr, std::size_t __csz) noexcept
          {
            auto __asz = _M_alloc_size<_Rebound>(__csz);
            auto __nblk = _Alloc_block::_M_cnt(__asz);

            if constexpr (_Stateless_alloc<_Rebound>)
              {
                _Rebound __b;
                __b.deallocate(reinterpret_cast<_Alloc_block*>(__ptr), __nblk);
              }
            else
              {
                auto __fn = reinterpret_cast<std::uintptr_t>(__ptr);
                auto __an = _M_alloc_address<_Rebound>(__fn, __csz);
                _Rebound __b(std::move(*__an));
                __an->~_Rebound();
                __b.deallocate(reinterpret_cast<_Alloc_block*>(__ptr), __nblk);
              }
          }

        template<typename _Na>
          static void*
          _M_allocate(const _Na& __na, std::size_t __csz)
          {
            using _Rebound = typename std::allocator_traits<_Na>
                             ::template rebind_alloc<_Alloc_block>;
            using _Rebound_ATr = typename std::allocator_traits<_Na>
                                 ::template rebind_traits<_Alloc_block>;

            static_assert(is_pointer_v<typename _Rebound_ATr::pointer>,
                          "Must use allocators for true pointers with generators");

            _Dealloc_fn __d = &_M_deallocator<_Rebound>;
            auto __b = static_cast<_Rebound>(__na);
            auto __asz = _M_alloc_size<_Rebound>(__csz);
            auto __nblk = _Alloc_block::_M_cnt(__asz);
            void* __p = __b.allocate(__nblk);
            auto __pn = reinterpret_cast<std::uintptr_t>(__p);
            *_M_dealloc_address(__pn, __csz) = __d;
            if constexpr (!_Stateless_alloc<_Rebound>)
              {
                auto __an = _M_alloc_address<_Rebound>(__pn, __csz);
                ::new (__an) _Rebound(std::move(__b));
              }
            return __p;
          }

      public:
        void*
        operator new(std::size_t __sz)
        {
          auto __nsz = _M_alloc_size<void>(__sz);
          _Dealloc_fn __d = [] (void* __ptr, std::size_t __sz)
          {
            ::operator delete(__ptr, _M_alloc_size<void>(__sz));
          };
          auto __p = ::operator new(__nsz);
          auto __pn = reinterpret_cast<uintptr_t>(__p);
          *_M_dealloc_address(__pn, __sz) = __d;
          return __p;
        }

        template<typename _Na, typename... _Args>
          void*
          operator new(std::size_t __sz,
                       allocator_arg_t, const _Na& __na,
                       const _Args&...)
          { return _M_allocate(__na, __sz); }

        template<typename _This, typename _Na, typename... _Args>
          void*
          operator new(std::size_t __sz,
                       const _This&,
                       allocator_arg_t, const _Na& __na,
                       const _Args&...)
          { return _M_allocate(__na, __sz); }

        void
        operator delete(void* __ptr, std::size_t __sz) noexcept
        {
          _Dealloc_fn __d;
          auto __pn = reinterpret_cast<uintptr_t>(__ptr);
          __d = *_M_dealloc_address(__pn, __sz);
          __d(__ptr, __sz);
        }
      };

    template<typename _Tp>
      concept _Cv_unqualified_object = is_object_v<_Tp>
                                       && same_as<_Tp, remove_cv_t<_Tp>>;
  } // namespace __gen
  /// @endcond

  template<typename _Ref, typename _V, typename _Alloc>
    class generator
      : public ranges::view_interface<generator<_Ref, _V, _Alloc>>
    {
      using _Value = __conditional_t<is_void_v<_V>, remove_cvref_t<_Ref>, _V>;
      static_assert(__gen::_Cv_unqualified_object<_Value>,
                    "Generator value must be a cv-unqualified object type");
      using _Reference = __gen::_Reference_t<_Ref, _V>;
      static_assert(is_reference_v<_Reference>
                    || (__gen::_Cv_unqualified_object<_Reference>
                        && copy_constructible<_Reference>),
                    "Generator reference type must be either a cv-unqualified "
                    "object type that is copy constructible or a "
                    "reference type");

      using _RRef = __conditional_t<
        is_reference_v<_Reference>,
        remove_reference_t<_Reference>&&,
        _Reference>;

      /* Required to model indirectly_readable and input_iterator. */
      static_assert(common_reference_with<_Reference&&, _Value&&>);
      static_assert(common_reference_with<_Reference&&, _RRef&&>);
      static_assert(common_reference_with<_RRef&&, const _Value&>);

      using _Yielded = __gen::_Yield_t<_Reference>;
      using _Erased_promise = __gen::_Promise_erased<_Yielded>;

      struct _Iterator;

      friend _Erased_promise;
      friend struct _Erased_promise::_Subyield_state;
    public:
      using yielded = _Yielded;

      struct promise_type : _Erased_promise, __gen::_Promise_alloc<_Alloc>
      {
        generator get_return_object() noexcept
        { return { coroutine_handle<promise_type>::from_promise(*this) }; }
      };

      static_assert(is_pointer_interconvertible_base_of_v<_Erased_promise,
                                                          promise_type>);

      generator(const generator&) = delete;

      generator(generator&& __other) noexcept
        : _M_coro(std::__exchange(__other._M_coro, nullptr)),
          _M_began(std::__exchange(__other._M_began, false))
      {}

      ~generator()
      {
        if (auto& __c = this->_M_coro)
          __c.destroy();
      }

      generator&
      operator=(generator __other) noexcept
      {
        swap(__other._M_coro, this->_M_coro);
        swap(__other._M_began, this->_M_began);
        return *this;
      }

      _Iterator
      begin()
      {
        this->_M_mark_as_started();
        auto __h = _Coro_handle::from_promise(_M_coro.promise());
        __h.promise()._M_nest._M_top() = __h;
        return { __h };
      }

      default_sentinel_t
      end() const noexcept
      { return default_sentinel; }

    private:
      using _Coro_handle = std::coroutine_handle<_Erased_promise>;

      generator(coroutine_handle<promise_type> __coro) noexcept
        : _M_coro { move(__coro) }
      {}

      void
      _M_mark_as_started() noexcept
      {
        __glibcxx_assert(!this->_M_began);
        this->_M_began = true;
      }

      coroutine_handle<promise_type> _M_coro;
      bool _M_began = false;
    };

  template<typename _Ref, typename _V, typename _Alloc>
    struct generator<_Ref, _V, _Alloc>::_Iterator
    {
      using value_type = _Value;
      using difference_type = ptrdiff_t;

      friend bool
      operator==(const _Iterator& __i, default_sentinel_t) noexcept
      { return __i._M_coro.done(); }

      friend class generator;

      _Iterator(_Iterator&& __o) noexcept
        : _M_coro(std::__exchange(__o._M_coro, {}))
      {}

      _Iterator&
      operator=(_Iterator&& __o) noexcept
      {
        this->_M_coro = std::__exchange(__o._M_coro, {});
        return *this;
      }

      _Iterator&
      operator++()
      {
        _M_next();
        return *this;
      }

      void
      operator++(int)
      { this->operator++(); }

      yielded
      operator*()
        const noexcept(is_nothrow_move_constructible_v<_Reference>)
      {
        auto& __p = this->_M_coro.promise();
        return static_cast<yielded>(*__p._M_value());
      }

    private:
      friend class generator;

      _Iterator(_Coro_handle __g)
        : _M_coro { __g }
      { this->_M_next(); }

      void _M_next()
      {
        auto& __t = this->_M_coro.promise()._M_nest._M_top();
        __t.resume();
      }

      _Coro_handle _M_coro;
    };

  /// @}

#if _GLIBCXX_HOSTED
  namespace pmr {
    template<typename _Ref, typename _Val = void>
      using generator
        = std::generator<_Ref, _Val, polymorphic_allocator<std::byte>>;
  }
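
  // Illustrative sketch only (hypothetical user code): a pmr::generator
  // coroutine takes allocator_arg_t plus an allocator as its leading
  // parameters, which lets the caller choose the memory resource that
  // backs the coroutine frame.
  //
  //   std::pmr::generator<int>
  //   counted(std::allocator_arg_t, std::pmr::polymorphic_allocator<>, int n)
  //   {
  //     for (int i = 0; i < n; ++i)
  //       co_yield i;
  //   }
  //
  //   std::pmr::monotonic_buffer_resource res;
  //   for (int i : counted(std::allocator_arg, &res, 3))
  //     ;  // visits 0, 1, 2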
#endif // HOSTED

_GLIBCXX_END_NAMESPACE_VERSION
} // namespace std
#endif // __cpp_lib_generator

#endif // _GLIBCXX_GENERATOR