1 // Allocators -*- C++ -*-
3 // Copyright (C) 2001 Free Software Foundation, Inc.
5 // This file is part of the GNU ISO C++ Library. This library is free
6 // software; you can redistribute it and/or modify it under the
7 // terms of the GNU General Public License as published by the
8 // Free Software Foundation; either version 2, or (at your option)
11 // This library is distributed in the hope that it will be useful,
12 // but WITHOUT ANY WARRANTY; without even the implied warranty of
13 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 // GNU General Public License for more details.
16 // You should have received a copy of the GNU General Public License along
17 // with this library; see the file COPYING. If not, write to the Free
18 // Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307,
21 // As a special exception, you may use this file as part of a free software
22 // library without restriction. Specifically, if other files instantiate
23 // templates or use macros or inline functions from this file, or you compile
24 // this file and link it with other files to produce an executable, this
25 // file does not by itself cause the resulting executable to be covered by
26 // the GNU General Public License. This exception does not however
27 // invalidate any other reasons why the executable file might be covered by
28 // the GNU General Public License.
31 * Copyright (c) 1996-1997
32 * Silicon Graphics Computer Systems, Inc.
34 * Permission to use, copy, modify, distribute and sell this software
35 * and its documentation for any purpose is hereby granted without fee,
36 * provided that the above copyright notice appear in all copies and
37 * that both that copyright notice and this permission notice appear
38 * in supporting documentation. Silicon Graphics makes no
39 * representations about the suitability of this software for any
40 * purpose. It is provided "as is" without express or implied warranty.
44 * This is an internal header file, included by other library headers.
45 * You should not attempt to use it directly.
48 #ifndef __GLIBCPP_INTERNAL_ALLOC_H
49 #define __GLIBCPP_INTERNAL_ALLOC_H
52 * @defgroup Allocators Memory Allocators
54 * stl_alloc.h implements some node allocators. These are NOT the same as
55 * allocators in the C++ standard, nor in the original H-P STL. They do not
56 * encapsulate different pointer types; we assume that there is only one
57 * pointer type. The C++ standard allocators are intended to allocate
58 * individual objects, not pools or arenas.
60 * In this file allocators are of two different styles: "standard" and
61 * "SGI" (quotes included). "Standard" allocators conform to 20.4. "SGI"
62 * allocators differ in AT LEAST the following ways (add to this list as you
65 * - "Standard" allocate() takes two parameters (n_count,hint=0) but "SGI"
 * allocate() takes one parameter (n_size).
67 * - Likewise, "standard" deallocate()'s argument is a count, but in "SGI"
69 * - max_size(), construct(), and destroy() are missing in "SGI" allocators.
70 * - reallocate(p,oldsz,newsz) is added in "SGI", and behaves as
71 * if p=realloc(p,newsz).
73 * "SGI" allocators may be wrapped in __allocator to convert the interface
74 * into a "standard" one.
77 * The canonical description of these classes is in docs/html/ext/howto.html
78 * or online at http://gcc.gnu.org/onlinedocs/libstdc++/ext/howto.html#3
85 #include <bits/functexcept.h> // For __throw_bad_alloc
86 #include <bits/stl_threads.h>
/**
 *  A new-based allocator, as required by the standard.  Allocation and
 *  deallocation forward to global new and delete.  "SGI" style, minus
 *  reallocate().
 *
 *  (See @link Allocators allocators info @endlink for more.)
 */
class __new_alloc
{
public:
  // Forward to ::operator new; throws std::bad_alloc on failure.
  static void*
  allocate(size_t __n)
  { return ::operator new(__n); }

  // The size argument is part of the "SGI" interface but unused here.
  static void
  deallocate(void* __p, size_t)
  { ::operator delete(__p); }
};
/**
 *  A malloc-based allocator.  Typically slower than the
 *  __default_alloc_template (below).  Typically thread-safe and more
 *  storage efficient.  The template argument is unused and is only present
 *  to permit multiple instantiations (but see __default_alloc_template
 *  for caveats).  "SGI" style, plus __set_malloc_handler for OOM conditions.
 *
 *  (See @link Allocators allocators info @endlink for more.)
 */
template <int __inst>
class __malloc_alloc_template
{
private:
  // Out-of-memory retry loops; defined out-of-line below.
  static void* _S_oom_malloc(size_t);
  static void* _S_oom_realloc(void*, size_t);
  // User-installed handler invoked before each OOM retry (0 = none).
  static void (* __malloc_alloc_oom_handler)();

public:
  static void*
  allocate(size_t __n)
  {
    void* __result = malloc(__n);
    if (0 == __result) __result = _S_oom_malloc(__n);
    return __result;
  }

  static void
  deallocate(void* __p, size_t /* __n */)
  { free(__p); }

  static void*
  reallocate(void* __p, size_t /* old_sz */, size_t __new_sz)
  {
    void* __result = realloc(__p, __new_sz);
    if (0 == __result) __result = _S_oom_realloc(__p, __new_sz);
    return __result;
  }

  // Install a new OOM handler and return the previous one
  // (mirrors std::set_new_handler).
  static void (* __set_malloc_handler(void (*__f)()))()
  {
    void (* __old)() = __malloc_alloc_oom_handler;
    __malloc_alloc_oom_handler = __f;
    return(__old);
  }
};
158 // malloc_alloc out-of-memory handling
159 template <int __inst
>
160 void (* __malloc_alloc_template
<__inst
>::__malloc_alloc_oom_handler
)() = 0;
162 template <int __inst
>
164 __malloc_alloc_template
<__inst
>::_S_oom_malloc(size_t __n
)
166 void (* __my_malloc_handler
)();
171 __my_malloc_handler
= __malloc_alloc_oom_handler
;
172 if (0 == __my_malloc_handler
)
173 std::__throw_bad_alloc();
174 (*__my_malloc_handler
)();
175 __result
= malloc(__n
);
181 template <int __inst
>
183 __malloc_alloc_template
<__inst
>::_S_oom_realloc(void* __p
, size_t __n
)
185 void (* __my_malloc_handler
)();
190 __my_malloc_handler
= __malloc_alloc_oom_handler
;
191 if (0 == __my_malloc_handler
)
192 std::__throw_bad_alloc();
193 (*__my_malloc_handler
)();
194 __result
= realloc(__p
, __n
);
201 // Determines the underlying allocator choice for the node allocator.
203 typedef __malloc_alloc_template
<0> __mem_interface
;
205 typedef __new_alloc __mem_interface
;
211 * This is used primarily (only?) in _Alloc_traits and other places to
212 * help provide the _Alloc_type typedef.
214 * This is neither "standard"-conforming nor "SGI". The _Alloc parameter
215 * must be "SGI" style.
217 * (See @link Allocators allocators info @endlink for more.)
219 template<class _Tp
, class _Alloc
>
223 static _Tp
* allocate(size_t __n
)
224 { return 0 == __n
? 0 : (_Tp
*) _Alloc::allocate(__n
* sizeof (_Tp
)); }
226 static _Tp
* allocate()
227 { return (_Tp
*) _Alloc::allocate(sizeof (_Tp
)); }
229 static void deallocate(_Tp
* __p
, size_t __n
)
230 { if (0 != __n
) _Alloc::deallocate(__p
, __n
* sizeof (_Tp
)); }
232 static void deallocate(_Tp
* __p
)
233 { _Alloc::deallocate(__p
, sizeof (_Tp
)); }
/**
 *  An adaptor for an underlying allocator (_Alloc) to check the size
 *  arguments for debugging.  Errors are reported using assert; these
 *  checks can be disabled via NDEBUG, but the space penalty is still
 *  paid, therefore it is far better to just use the underlying allocator
 *  by itself when no checking is desired.
 *
 *  "There is some evidence that this can confuse Purify." - SGI comment
 *
 *  This adaptor is "SGI" style.  The _Alloc parameter must also be "SGI".
 *
 *  (See @link Allocators allocators info @endlink for more.)
 */
template <class _Alloc>
class __debug_alloc
{
private:
  enum {_S_extra = 8};  // Size of space used to store size.  Note that this
                        // must be large enough to preserve alignment.

public:
  // Allocate __n usable bytes; the requested size is stashed in the extra
  // header so deallocate/reallocate can verify the caller's size argument.
  static void* allocate(size_t __n)
  {
    char* __result = (char*)_Alloc::allocate(__n + (int) _S_extra);
    *(size_t*)__result = __n;
    return __result + (int) _S_extra;
  }

  static void deallocate(void* __p, size_t __n)
  {
    char* __real_p = (char*)__p - (int) _S_extra;
    assert(*(size_t*)__real_p == __n);
    _Alloc::deallocate(__real_p, __n + (int) _S_extra);
  }

  static void* reallocate(void* __p, size_t __old_sz, size_t __new_sz)
  {
    char* __real_p = (char*)__p - (int) _S_extra;
    assert(*(size_t*)__real_p == __old_sz);
    char* __result = (char*)
      _Alloc::reallocate(__real_p, __old_sz + (int) _S_extra,
                         __new_sz + (int) _S_extra);
    *(size_t*)__result = __new_sz;
    return __result + (int) _S_extra;
  }
};
// Under __USE_MALLOC the node allocator is bypassed entirely: both
// allocator typedefs go straight to the malloc-based __mem_interface.
// (In the pristine source this was the #ifdef branch paired with the
// "#endif /* ! __USE_MALLOC */" further below.)
#ifdef __USE_MALLOC
typedef __mem_interface __alloc;
typedef __mem_interface __single_client_alloc;
#endif
297 * Default node allocator. "SGI" style. Uses __mem_interface for its
298 * underlying requests (and makes as few requests as possible).
299 * **** Currently __mem_interface is always __new_alloc, never __malloc*.
301 * Important implementation properties:
302 * 1. If the clients request an object of size > _MAX_BYTES, the resulting
303 * object will be obtained directly from the underlying __mem_interface.
304 * 2. In all other cases, we allocate an object of size exactly
305 * _S_round_up(requested_size). Thus the client has enough size
306 * information that we can return the object to the proper free list
307 * without permanently losing part of the object.
309 * The first template parameter specifies whether more than one thread may
310 * use this allocator. It is safe to allocate an object from one instance
311 * of a default_alloc and deallocate it with another one. This effectively
312 * transfers its ownership to the second one. This may have undesirable
313 * effects on reference locality.
315 * The second parameter is unused and serves only to allow the creation of
316 * multiple default_alloc instances. Note that containers built on different
317 * allocator instances have different types, limiting the utility of this
318 * approach. If you do not wish to share the free lists with the main
319 * default_alloc instance, instantiate this with a non-zero __inst.
322 * (See @link Allocators allocators info @endlink for more.)
324 template <bool __threads
, int __inst
>
325 class __default_alloc_template
330 enum {_MAX_BYTES
= 128};
331 enum {_NFREELISTS
= _MAX_BYTES
/ _ALIGN
};
334 _S_round_up(size_t __bytes
)
335 { return (((__bytes
) + (size_t) _ALIGN
-1) & ~((size_t) _ALIGN
- 1)); }
338 union _Obj
* _M_free_list_link
;
339 char _M_client_data
[1]; // The client sees this.
342 static _Obj
* volatile _S_free_list
[];
343 static size_t _S_freelist_index(size_t __bytes
)
344 { return (((__bytes
) + (size_t)_ALIGN
-1)/(size_t)_ALIGN
- 1); }
346 // Returns an object of size __n, and optionally adds to size __n free list.
347 static void* _S_refill(size_t __n
);
348 // Allocates a chunk for nobjs of size size. nobjs may be reduced
349 // if it is inconvenient to allocate the requested number.
350 static char* _S_chunk_alloc(size_t __size
, int& __nobjs
);
352 // Chunk allocation state.
353 static char* _S_start_free
;
354 static char* _S_end_free
;
355 static size_t _S_heap_size
;
357 static _STL_mutex_lock _S_node_allocator_lock
;
359 // It would be nice to use _STL_auto_lock here. But we need a test whether
360 // threads are in use.
363 _Lock() { if (__threads
) _S_node_allocator_lock
._M_acquire_lock(); }
364 ~_Lock() { if (__threads
) _S_node_allocator_lock
._M_release_lock(); }
365 } __attribute__ ((__unused__
));
371 static void* allocate(size_t __n
)
375 if (__n
> (size_t) _MAX_BYTES
)
376 __ret
= __mem_interface::allocate(__n
);
379 _Obj
* volatile* __my_free_list
= _S_free_list
+ _S_freelist_index(__n
);
380 // Acquire the lock here with a constructor call. This ensures that
381 // it is released in exit or during stack unwinding.
382 _Lock __lock_instance
;
383 _Obj
* __restrict__ __result
= *__my_free_list
;
385 __ret
= _S_refill(_S_round_up(__n
));
388 *__my_free_list
= __result
-> _M_free_list_link
;
397 static void deallocate(void* __p
, size_t __n
)
399 if (__n
> (size_t) _MAX_BYTES
)
400 __mem_interface::deallocate(__p
, __n
);
403 _Obj
* volatile* __my_free_list
404 = _S_free_list
+ _S_freelist_index(__n
);
405 _Obj
* __q
= (_Obj
*)__p
;
407 // Acquire the lock here with a constructor call. This ensures that
408 // it is released in exit or during stack unwinding.
409 _Lock __lock_instance
;
410 __q
-> _M_free_list_link
= *__my_free_list
;
411 *__my_free_list
= __q
;
415 static void* reallocate(void* __p
, size_t __old_sz
, size_t __new_sz
);
419 template <bool __threads
, int __inst
>
420 inline bool operator==(const __default_alloc_template
<__threads
, __inst
>&,
421 const __default_alloc_template
<__threads
, __inst
>&)
426 template <bool __threads
, int __inst
>
427 inline bool operator!=(const __default_alloc_template
<__threads
, __inst
>&,
428 const __default_alloc_template
<__threads
, __inst
>&)
434 // We allocate memory in large chunks in order to avoid fragmenting the
435 // malloc heap (or whatever __mem_interface is using) too much. We assume
436 // that __size is properly aligned. We hold the allocation lock.
437 template <bool __threads
, int __inst
>
439 __default_alloc_template
<__threads
, __inst
>::_S_chunk_alloc(size_t __size
,
443 size_t __total_bytes
= __size
* __nobjs
;
444 size_t __bytes_left
= _S_end_free
- _S_start_free
;
446 if (__bytes_left
>= __total_bytes
)
448 __result
= _S_start_free
;
449 _S_start_free
+= __total_bytes
;
452 else if (__bytes_left
>= __size
)
454 __nobjs
= (int)(__bytes_left
/__size
);
455 __total_bytes
= __size
* __nobjs
;
456 __result
= _S_start_free
;
457 _S_start_free
+= __total_bytes
;
462 size_t __bytes_to_get
=
463 2 * __total_bytes
+ _S_round_up(_S_heap_size
>> 4);
464 // Try to make use of the left-over piece.
465 if (__bytes_left
> 0)
467 _Obj
* volatile* __my_free_list
=
468 _S_free_list
+ _S_freelist_index(__bytes_left
);
470 ((_Obj
*)_S_start_free
) -> _M_free_list_link
= *__my_free_list
;
471 *__my_free_list
= (_Obj
*)_S_start_free
;
473 _S_start_free
= (char*) __mem_interface::allocate(__bytes_to_get
);
474 if (0 == _S_start_free
)
477 _Obj
* volatile* __my_free_list
;
479 // Try to make do with what we have. That can't hurt. We
480 // do not try smaller requests, since that tends to result
481 // in disaster on multi-process machines.
483 for (; __i
<= (size_t) _MAX_BYTES
; __i
+= (size_t) _ALIGN
)
485 __my_free_list
= _S_free_list
+ _S_freelist_index(__i
);
486 __p
= *__my_free_list
;
489 *__my_free_list
= __p
-> _M_free_list_link
;
490 _S_start_free
= (char*)__p
;
491 _S_end_free
= _S_start_free
+ __i
;
492 return(_S_chunk_alloc(__size
, __nobjs
));
493 // Any leftover piece will eventually make it to the
497 _S_end_free
= 0; // In case of exception.
498 _S_start_free
= (char*)__mem_interface::allocate(__bytes_to_get
);
499 // This should either throw an exception or remedy the situation.
500 // Thus we assume it succeeded.
502 _S_heap_size
+= __bytes_to_get
;
503 _S_end_free
= _S_start_free
+ __bytes_to_get
;
504 return(_S_chunk_alloc(__size
, __nobjs
));
509 // Returns an object of size __n, and optionally adds to "size __n"'s free list.
510 // We assume that __n is properly aligned. We hold the allocation lock.
511 template <bool __threads
, int __inst
>
513 __default_alloc_template
<__threads
, __inst
>::_S_refill(size_t __n
)
516 char* __chunk
= _S_chunk_alloc(__n
, __nobjs
);
517 _Obj
* volatile* __my_free_list
;
523 if (1 == __nobjs
) return(__chunk
);
524 __my_free_list
= _S_free_list
+ _S_freelist_index(__n
);
526 /* Build free list in chunk */
527 __result
= (_Obj
*)__chunk
;
528 *__my_free_list
= __next_obj
= (_Obj
*)(__chunk
+ __n
);
529 for (__i
= 1; ; __i
++) {
530 __current_obj
= __next_obj
;
531 __next_obj
= (_Obj
*)((char*)__next_obj
+ __n
);
532 if (__nobjs
- 1 == __i
) {
533 __current_obj
-> _M_free_list_link
= 0;
536 __current_obj
-> _M_free_list_link
= __next_obj
;
543 template <bool threads
, int inst
>
545 __default_alloc_template
<threads
, inst
>::reallocate(void* __p
,
552 if (__old_sz
> (size_t) _MAX_BYTES
&& __new_sz
> (size_t) _MAX_BYTES
) {
553 return(realloc(__p
, __new_sz
));
555 if (_S_round_up(__old_sz
) == _S_round_up(__new_sz
)) return(__p
);
556 __result
= allocate(__new_sz
);
557 __copy_sz
= __new_sz
> __old_sz
? __old_sz
: __new_sz
;
558 memcpy(__result
, __p
, __copy_sz
);
559 deallocate(__p
, __old_sz
);
563 template <bool __threads
, int __inst
>
565 __default_alloc_template
<__threads
, __inst
>::_S_node_allocator_lock
566 __STL_MUTEX_INITIALIZER
;
568 template <bool __threads
, int __inst
>
569 char* __default_alloc_template
<__threads
, __inst
>::_S_start_free
= 0;
571 template <bool __threads
, int __inst
>
572 char* __default_alloc_template
<__threads
, __inst
>::_S_end_free
= 0;
574 template <bool __threads
, int __inst
>
575 size_t __default_alloc_template
<__threads
, __inst
>::_S_heap_size
= 0;
577 template <bool __threads
, int __inst
>
578 typename __default_alloc_template
<__threads
, __inst
>::_Obj
* volatile
579 __default_alloc_template
<__threads
, __inst
> ::_S_free_list
[
580 __default_alloc_template
<__threads
, __inst
>::_NFREELISTS
];
583 typedef __default_alloc_template
<true, 0> __alloc
;
584 typedef __default_alloc_template
<false, 0> __single_client_alloc
;
587 #endif /* ! __USE_MALLOC */
591 * This is a "standard" allocator, as per [20.4]. The private _Alloc is
592 * "SGI" style. (See comments at the top of stl_alloc.h.)
594 * The underlying allocator behaves as follows.
595 * - if __USE_MALLOC then
596 * - thread safety depends on malloc and is entirely out of our hands
597 * - __malloc_alloc_template is used for memory requests
598 * - else (the default)
599 * - __default_alloc_template is used via two typedefs
600 * - "__single_client_alloc" typedef does no locking for threads
601 * - "__alloc" typedef is threadsafe via the locks
602 * - __new_alloc is used for memory requests
604 * (See @link Allocators allocators info @endlink for more.)
609 typedef __alloc _Alloc
; // The underlying allocator.
611 typedef size_t size_type
;
612 typedef ptrdiff_t difference_type
;
613 typedef _Tp
* pointer
;
614 typedef const _Tp
* const_pointer
;
615 typedef _Tp
& reference
;
616 typedef const _Tp
& const_reference
;
617 typedef _Tp value_type
;
619 template <class _Tp1
> struct rebind
{
620 typedef allocator
<_Tp1
> other
;
623 allocator() throw() {}
624 allocator(const allocator
&) throw() {}
625 template <class _Tp1
> allocator(const allocator
<_Tp1
>&) throw() {}
626 ~allocator() throw() {}
628 pointer
address(reference __x
) const { return &__x
; }
629 const_pointer
address(const_reference __x
) const { return &__x
; }
631 // __n is permitted to be 0. The C++ standard says nothing about what
632 // the return value is when __n == 0.
633 _Tp
* allocate(size_type __n
, const void* = 0) {
634 return __n
!= 0 ? static_cast<_Tp
*>(_Alloc::allocate(__n
* sizeof(_Tp
)))
638 // __p is not permitted to be a null pointer.
639 void deallocate(pointer __p
, size_type __n
)
640 { _Alloc::deallocate(__p
, __n
* sizeof(_Tp
)); }
642 size_type
max_size() const throw()
643 { return size_t(-1) / sizeof(_Tp
); }
645 void construct(pointer __p
, const _Tp
& __val
) { new(__p
) _Tp(__val
); }
646 void destroy(pointer __p
) { __p
->~_Tp(); }
650 class allocator
<void> {
652 typedef size_t size_type
;
653 typedef ptrdiff_t difference_type
;
654 typedef void* pointer
;
655 typedef const void* const_pointer
;
656 typedef void value_type
;
658 template <class _Tp1
> struct rebind
{
659 typedef allocator
<_Tp1
> other
;
664 template <class _T1
, class _T2
>
665 inline bool operator==(const allocator
<_T1
>&, const allocator
<_T2
>&)
670 template <class _T1
, class _T2
>
671 inline bool operator!=(const allocator
<_T1
>&, const allocator
<_T2
>&)
/**
 *  Allocator adaptor to turn an "SGI" style allocator (e.g., __alloc,
 *  __malloc_alloc_template) into a "standard" conforming allocator.  Note
 *  that this adaptor does *not* assume that all objects of the underlying
 *  alloc class are identical, nor does it assume that all of the underlying
 *  alloc's member functions are static member functions.  Note, also, that
 *  __allocator<_Tp, __alloc> is essentially the same thing as allocator<_Tp>.
 *
 *  (See @link Allocators allocators info @endlink for more.)
 */
template <class _Tp, class _Alloc>
struct __allocator
{
  _Alloc __underlying_alloc;     // per-instance "SGI" allocator

  typedef size_t     size_type;
  typedef ptrdiff_t  difference_type;
  typedef _Tp*       pointer;
  typedef const _Tp* const_pointer;
  typedef _Tp&       reference;
  typedef const _Tp& const_reference;
  typedef _Tp        value_type;

  template <class _Tp1> struct rebind {
    typedef __allocator<_Tp1, _Alloc> other;
  };

  __allocator() throw() {}
  __allocator(const __allocator& __a) throw()
    : __underlying_alloc(__a.__underlying_alloc) {}
  template <class _Tp1>
  __allocator(const __allocator<_Tp1, _Alloc>& __a) throw()
    : __underlying_alloc(__a.__underlying_alloc) {}
  ~__allocator() throw() {}

  pointer address(reference __x) const { return &__x; }
  const_pointer address(const_reference __x) const { return &__x; }

  // __n is permitted to be 0.
  _Tp* allocate(size_type __n, const void* = 0) {
    return __n != 0
      ? static_cast<_Tp*>(__underlying_alloc.allocate(__n * sizeof(_Tp)))
      : 0;
  }

  // __p is not permitted to be a null pointer.
  void deallocate(pointer __p, size_type __n)
  { __underlying_alloc.deallocate(__p, __n * sizeof(_Tp)); }

  size_type max_size() const throw()
  { return size_t(-1) / sizeof(_Tp); }

  void construct(pointer __p, const _Tp& __val) { new(__p) _Tp(__val); }
  void destroy(pointer __p) { __p->~_Tp(); }
};
734 template <class _Alloc
>
735 class __allocator
<void, _Alloc
> {
736 typedef size_t size_type
;
737 typedef ptrdiff_t difference_type
;
738 typedef void* pointer
;
739 typedef const void* const_pointer
;
740 typedef void value_type
;
742 template <class _Tp1
> struct rebind
{
743 typedef __allocator
<_Tp1
, _Alloc
> other
;
747 template <class _Tp
, class _Alloc
>
748 inline bool operator==(const __allocator
<_Tp
, _Alloc
>& __a1
,
749 const __allocator
<_Tp
, _Alloc
>& __a2
)
751 return __a1
.__underlying_alloc
== __a2
.__underlying_alloc
;
754 template <class _Tp
, class _Alloc
>
755 inline bool operator!=(const __allocator
<_Tp
, _Alloc
>& __a1
,
756 const __allocator
<_Tp
, _Alloc
>& __a2
)
758 return __a1
.__underlying_alloc
!= __a2
.__underlying_alloc
;
763 /** Comparison operators for all of the predifined SGI-style allocators.
764 * This ensures that __allocator<malloc_alloc> (for example) will work
765 * correctly. As required, all allocators compare equal.
768 inline bool operator==(const __malloc_alloc_template
<inst
>&,
769 const __malloc_alloc_template
<inst
>&)
774 template <int __inst
>
775 inline bool operator!=(const __malloc_alloc_template
<__inst
>&,
776 const __malloc_alloc_template
<__inst
>&)
781 template <class _Alloc
>
782 inline bool operator==(const __debug_alloc
<_Alloc
>&,
783 const __debug_alloc
<_Alloc
>&) {
787 template <class _Alloc
>
788 inline bool operator!=(const __debug_alloc
<_Alloc
>&,
789 const __debug_alloc
<_Alloc
>&) {
/**
 *  Another allocator adaptor:  _Alloc_traits.  This serves two purposes.
 *  First, make it possible to write containers that can use either "SGI"
 *  style allocators or "standard" allocators.  Second, provide a mechanism
 *  so that containers can query whether or not the allocator has distinct
 *  instances.  If not, the container can avoid wasting a word of memory to
 *  store an empty object.  For examples of use, see stl_vector.h, etc, or
 *  any of the other classes derived from this one.
 *
 *  This adaptor uses partial specialization.  The general case of
 *  _Alloc_traits<_Tp, _Alloc> assumes that _Alloc is a
 *  standard-conforming allocator, possibly with non-equal instances and
 *  non-static members.  (It still behaves correctly even if _Alloc has
 *  static member and if all instances are equal.  Refinements affect
 *  performance, not correctness.)
 *
 *  There are always two members:  allocator_type, which is a standard-
 *  conforming allocator type for allocating objects of type _Tp, and
 *  _S_instanceless, a static const member of type bool.  If
 *  _S_instanceless is true, this means that there is no difference
 *  between any two instances of type allocator_type.  Furthermore, if
 *  _S_instanceless is true, then _Alloc_traits has one additional
 *  member:  _Alloc_type.  This type encapsulates allocation and
 *  deallocation of objects of type _Tp through a static interface; it
 *  has two member functions, whose signatures are
 *
 *  - static _Tp* allocate(size_t)
 *  - static void deallocate(_Tp*, size_t)
 *
 *  The size_t parameters are "standard" style (see top of stl_alloc.h) in
 *  that they take counts, not sizes.
 *
 *  (See @link Allocators allocators info @endlink for more.)
 */
// The fully general version.
template <class _Tp, class _Allocator>
struct _Alloc_traits
{
  static const bool _S_instanceless = false;
  typedef typename _Allocator::template rebind<_Tp>::other allocator_type;
};

// Out-of-class definition for the in-class-initialized static member.
template <class _Tp, class _Allocator>
const bool _Alloc_traits<_Tp, _Allocator>::_S_instanceless;
843 /// The version for the default allocator.
844 template <class _Tp
, class _Tp1
>
845 struct _Alloc_traits
<_Tp
, allocator
<_Tp1
> >
847 static const bool _S_instanceless
= true;
848 typedef __simple_alloc
<_Tp
, __alloc
> _Alloc_type
;
849 typedef allocator
<_Tp
> allocator_type
;
854 /// Versions for the predefined "SGI" style allocators.
855 template <class _Tp
, int __inst
>
856 struct _Alloc_traits
<_Tp
, __malloc_alloc_template
<__inst
> >
858 static const bool _S_instanceless
= true;
859 typedef __simple_alloc
<_Tp
, __malloc_alloc_template
<__inst
> > _Alloc_type
;
860 typedef __allocator
<_Tp
, __malloc_alloc_template
<__inst
> > allocator_type
;
864 template <class _Tp
, bool __threads
, int __inst
>
865 struct _Alloc_traits
<_Tp
, __default_alloc_template
<__threads
, __inst
> >
867 static const bool _S_instanceless
= true;
868 typedef __simple_alloc
<_Tp
, __default_alloc_template
<__threads
, __inst
> >
870 typedef __allocator
<_Tp
, __default_alloc_template
<__threads
, __inst
> >
875 template <class _Tp
, class _Alloc
>
876 struct _Alloc_traits
<_Tp
, __debug_alloc
<_Alloc
> >
878 static const bool _S_instanceless
= true;
879 typedef __simple_alloc
<_Tp
, __debug_alloc
<_Alloc
> > _Alloc_type
;
880 typedef __allocator
<_Tp
, __debug_alloc
<_Alloc
> > allocator_type
;
885 /// Versions for the __allocator adaptor used with the predefined "SGI" style allocators.
886 template <class _Tp
, class _Tp1
, int __inst
>
887 struct _Alloc_traits
<_Tp
,
888 __allocator
<_Tp1
, __malloc_alloc_template
<__inst
> > >
890 static const bool _S_instanceless
= true;
891 typedef __simple_alloc
<_Tp
, __malloc_alloc_template
<__inst
> > _Alloc_type
;
892 typedef __allocator
<_Tp
, __malloc_alloc_template
<__inst
> > allocator_type
;
896 template <class _Tp
, class _Tp1
, bool __thr
, int __inst
>
897 struct _Alloc_traits
<_Tp
,
899 __default_alloc_template
<__thr
, __inst
> > >
901 static const bool _S_instanceless
= true;
902 typedef __simple_alloc
<_Tp
, __default_alloc_template
<__thr
,__inst
> >
904 typedef __allocator
<_Tp
, __default_alloc_template
<__thr
,__inst
> >
909 template <class _Tp
, class _Tp1
, class _Alloc
>
910 struct _Alloc_traits
<_Tp
, __allocator
<_Tp1
, __debug_alloc
<_Alloc
> > >
912 static const bool _S_instanceless
= true;
913 typedef __simple_alloc
<_Tp
, __debug_alloc
<_Alloc
> > _Alloc_type
;
914 typedef __allocator
<_Tp
, __debug_alloc
<_Alloc
> > allocator_type
;
920 #endif /* __GLIBCPP_INTERNAL_ALLOC_H */