/* Functions to support a pool of allocatable objects
   Copyright (C) 1997-2015 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@cgsoftware.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#ifndef ALLOC_POOL_H
#define ALLOC_POOL_H

#include "memory-block.h"

extern void dump_alloc_pool_statistics (void);

typedef unsigned long ALLOC_POOL_ID_TYPE;

/* Last used ID.  */
extern ALLOC_POOL_ID_TYPE last_id;

/* Pool allocator memory usage.  */
struct pool_usage: public mem_usage
{
  /* Default constructor.  */
  pool_usage (): m_element_size (0), m_pool_name ("") {}
  /* Constructor.  */
  pool_usage (size_t allocated, size_t times, size_t peak,
	      size_t instances, size_t element_size,
	      const char *pool_name)
    : mem_usage (allocated, times, peak, instances),
      m_element_size (element_size),
      m_pool_name (pool_name) {}

  /* Sum the usage with SECOND usage.  */
  pool_usage
  operator+ (const pool_usage &second)
  {
    return pool_usage (m_allocated + second.m_allocated,
		       m_times + second.m_times,
		       m_peak + second.m_peak,
		       m_instances + second.m_instances,
		       m_element_size, m_pool_name);
  }

  /* Dump usage coupled to LOC location, where TOTAL is sum of all rows.  */
  inline void
  dump (mem_location *loc, mem_usage &total) const
  {
    char *location_string = loc->to_string ();

    fprintf (stderr, "%-32s%-48s %6li%10li:%5.1f%%%10li%10li:%5.1f%%%12li\n",
	     m_pool_name, location_string, (long)m_instances,
	     (long)m_allocated, get_percent (m_allocated, total.m_allocated),
	     (long)m_peak, (long)m_times,
	     get_percent (m_times, total.m_times),
	     (long)m_element_size);

    free (location_string);
  }

  /* Dump header with NAME.  */
  static inline void
  dump_header (const char *name)
  {
    fprintf (stderr, "%-32s%-48s %6s%11s%16s%17s%12s\n", "Pool name", name,
	     "Pools", "Leak", "Peak", "Times", "Elt size");
    print_dash_line ();
  }

  /* Dump footer.  */
  inline void
  dump_footer ()
  {
    print_dash_line ();
    fprintf (stderr, "%s%82li%10li\n", "Total", (long)m_instances,
	     (long)m_allocated);
    print_dash_line ();
  }

  /* Element size.  */
  size_t m_element_size;
  /* Pool name.  */
  const char *m_pool_name;
};

extern mem_alloc_description<pool_usage> pool_allocator_usage;

#if 0
/* If a pool with custom block size is needed, one might use the following
   template.  An instance of this template can be used as a parameter for
   instantiating the base_pool_allocator template:

	typedef custom_block_allocator <128*1024> huge_block_allocator;
	...
	static base_pool_allocator <huge_block_allocator>
	  value_pool ("value", 16384);

   Right now it's not used anywhere in the code, and is given here as an
   example.  */

template <size_t BlockSize>
class custom_block_allocator
{
public:
  static const size_t block_size = BlockSize;

  static inline void *
  allocate () ATTRIBUTE_MALLOC
  {
    return XNEWVEC (char, BlockSize);
  }

  static inline void
  release (void *block)
  {
    XDELETEVEC (block);
  }
};
#endif

/* Generic pool allocator.  */

template <typename TBlockAllocator>
class base_pool_allocator
{
public:
  /* Default constructor for pool allocator called NAME.  */
  base_pool_allocator (const char *name, size_t size CXX_MEM_STAT_INFO);
  ~base_pool_allocator ();
  void release ();
  void release_if_empty ();
  void *allocate () ATTRIBUTE_MALLOC;
  void remove (void *object);
  size_t num_elts_current ();

private:
  struct allocation_pool_list
  {
    allocation_pool_list *next;
  };

  /* Initialize a pool allocator.  */
  void initialize ();

  struct allocation_object
  {
    /* The ID of alloc pool which the object was allocated from.  */
    ALLOC_POOL_ID_TYPE id;

    union
      {
	/* The data of the object.  */
	char data[1];

	/* Because we want any type of data to be well aligned after the ID,
	   the following elements are here.  They are never accessed so
	   the allocated object may be even smaller than this structure.
	   We do not care about alignment for floating-point types.  */
	char *align_p;
	int64_t align_i;
      } u;

    static inline allocation_object*
    get_instance (void *data_ptr)
    {
      return (allocation_object *)(((char *)(data_ptr))
				   - offsetof (allocation_object,
					       u.data));
    }

    static inline void*
    get_data (void *instance_ptr)
    {
      return (void*)(((allocation_object *) instance_ptr)->u.data);
    }
  };
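
  /* Layout sketch: each block starts with its allocation_pool_list header
     (padded to a multiple of 8), followed by m_elts_per_block elements;
     each element is an allocation_object, i.e. the pool ID followed by the
     user data (the union above exists only to fix the alignment of that
     data).  allocate () hands out a pointer to u.data; get_instance ()
     recovers the enclosing allocation_object from such a pointer, and
     get_data () goes the other way:

	block:  [ list header ][ id | u.data ][ id | u.data ] ...  */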

  /* Align X to 8.  */
  static inline size_t
  align_eight (size_t x)
  {
    return (((x+7) >> 3) << 3);
  }
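
  /* For example, align_eight (1) == 8, align_eight (13) == 16 and
     align_eight (16) == 16, so every element size and header size used
     below is a multiple of 8.  */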

  const char *m_name;
  ALLOC_POOL_ID_TYPE m_id;
  size_t m_elts_per_block;

  /* These are the elements that have been allocated at least once
     and freed.  */
  allocation_pool_list *m_returned_free_list;

  /* These are the elements that have not yet been allocated out of
     the last block obtained from XNEWVEC.  */
  char* m_virgin_free_list;

  /* The number of elements in the virgin_free_list that can be
     allocated before needing another block.  */
  size_t m_virgin_elts_remaining;
  /* The number of elements that are allocated.  */
  size_t m_elts_allocated;
  /* The number of elements that are released.  */
  size_t m_elts_free;
  /* The number of allocated blocks.  */
  size_t m_blocks_allocated;
  /* List of blocks that are used to allocate new objects.  */
  allocation_pool_list *m_block_list;
  /* Size of a pool element in bytes.  */
  size_t m_elt_size;
  /* Size in bytes that should be allocated for each element.  */
  size_t m_size;
  /* Flag if a pool allocator is initialized.  */
  bool m_initialized;
  /* Memory allocation location.  */
  mem_location m_location;
};

template <typename TBlockAllocator>
inline
base_pool_allocator <TBlockAllocator>::base_pool_allocator (
  const char *name, size_t size MEM_STAT_DECL):
  m_name (name), m_id (0), m_elts_per_block (0), m_returned_free_list (NULL),
  m_virgin_free_list (NULL), m_virgin_elts_remaining (0), m_elts_allocated (0),
  m_elts_free (0), m_blocks_allocated (0), m_block_list (NULL), m_size (size),
  m_initialized (false), m_location (ALLOC_POOL_ORIGIN, false PASS_MEM_STAT) {}

/* Initialize a pool allocator.  */

template <typename TBlockAllocator>
inline void
base_pool_allocator <TBlockAllocator>::initialize ()
{
  gcc_checking_assert (!m_initialized);
  m_initialized = true;

  size_t size = m_size;

  gcc_checking_assert (m_name);

  /* Make size large enough to store the list header.  */
  if (size < sizeof (allocation_pool_list*))
    size = sizeof (allocation_pool_list*);

  /* Now align the size to a multiple of 8.  */
  size = align_eight (size);

  /* Add the aligned size of ID.  */
  size += offsetof (allocation_object, u.data);

  m_elt_size = size;

  if (GATHER_STATISTICS)
    {
      pool_usage *u = pool_allocator_usage.register_descriptor
	(this, new mem_location (m_location));

      u->m_element_size = m_elt_size;
      u->m_pool_name = m_name;
    }

  /* List header size should be a multiple of 8.  */
  size_t header_size = align_eight (sizeof (allocation_pool_list));

  m_elts_per_block = (TBlockAllocator::block_size - header_size) / size;
  gcc_checking_assert (m_elts_per_block != 0);

#ifdef ENABLE_CHECKING
  /* Increase the last used ID and use it for this pool.
     ID == 0 is used for free elements of pool so skip it.  */
  last_id++;
  if (last_id == 0)
    last_id++;

  m_id = last_id;
#endif
}
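
/* A worked example of the sizing above, assuming an LP64 host where both
   ALLOC_POOL_ID_TYPE and pointers are 8 bytes (the exact figures depend on
   the host ABI and on TBlockAllocator::block_size): a pool created with
   size == 20 is first rounded up to 24 bytes, then grows by
   offsetof (allocation_object, u.data) == 8, giving m_elt_size == 32.
   With the 64 KB blocks of memory_block_pool, header_size == 8 and
   m_elts_per_block == (65536 - 8) / 32 == 2047.  */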

/* Free all memory allocated for the given memory pool.  */
template <typename TBlockAllocator>
inline void
base_pool_allocator <TBlockAllocator>::release ()
{
  if (!m_initialized)
    return;

  allocation_pool_list *block, *next_block;

  /* Free each block allocated to the pool.  */
  for (block = m_block_list; block != NULL; block = next_block)
    {
      next_block = block->next;
      TBlockAllocator::release (block);
    }

  if (GATHER_STATISTICS)
    {
      pool_allocator_usage.release_instance_overhead
	(this, (m_elts_allocated - m_elts_free) * m_elt_size);
    }

  m_returned_free_list = NULL;
  m_virgin_free_list = NULL;
  m_virgin_elts_remaining = 0;
  m_elts_allocated = 0;
  m_elts_free = 0;
  m_blocks_allocated = 0;
  m_block_list = NULL;
}

template <typename TBlockAllocator>
inline void
base_pool_allocator <TBlockAllocator>::release_if_empty ()
{
  if (m_elts_free == m_elts_allocated)
    release ();
}

template <typename TBlockAllocator>
inline base_pool_allocator <TBlockAllocator>::~base_pool_allocator ()
{
  release ();
}

/* Allocates one element from the pool specified.  */
template <typename TBlockAllocator>
inline void*
base_pool_allocator <TBlockAllocator>::allocate ()
{
  if (!m_initialized)
    initialize ();

  allocation_pool_list *header;
#ifdef ENABLE_VALGRIND_ANNOTATIONS
  int size;
#endif

  if (GATHER_STATISTICS)
    {
      pool_allocator_usage.register_instance_overhead (m_elt_size, this);
    }

#ifdef ENABLE_VALGRIND_ANNOTATIONS
  size = m_elt_size - offsetof (allocation_object, u.data);
#endif

  /* If there are no more free elements, make some more!  */
  if (!m_returned_free_list)
    {
      char *block;
      if (!m_virgin_elts_remaining)
	{
	  allocation_pool_list *block_header;

	  /* Make the block.  */
	  block = reinterpret_cast<char *> (TBlockAllocator::allocate ());
	  block_header = (allocation_pool_list*) block;
	  block += align_eight (sizeof (allocation_pool_list));

	  /* Throw it on the block list.  */
	  block_header->next = m_block_list;
	  m_block_list = block_header;

	  /* Make the block available for allocation.  */
	  m_virgin_free_list = block;
	  m_virgin_elts_remaining = m_elts_per_block;

	  /* Also update the number of elements we have free/allocated, and
	     increment the allocated block count.  */
	  m_elts_allocated += m_elts_per_block;
	  m_elts_free += m_elts_per_block;
	  m_blocks_allocated += 1;
	}

      /* We now know that we can take the first elt off the virgin list and
	 put it on the returned list.  */
      block = m_virgin_free_list;
      header = (allocation_pool_list*) allocation_object::get_data (block);
      header->next = NULL;
#ifdef ENABLE_CHECKING
      /* Mark the element to be free.  */
      ((allocation_object*) block)->id = 0;
#endif
      VALGRIND_DISCARD (VALGRIND_MAKE_MEM_NOACCESS (header, size));
      m_returned_free_list = header;
      m_virgin_free_list += m_elt_size;
      m_virgin_elts_remaining--;
    }

  /* Pull the first free element from the free list, and return it.  */
  header = m_returned_free_list;
  VALGRIND_DISCARD (VALGRIND_MAKE_MEM_DEFINED (header, sizeof (*header)));
  m_returned_free_list = header->next;
  m_elts_free--;

#ifdef ENABLE_CHECKING
  /* Set the ID for element.  */
  allocation_object::get_instance (header)->id = m_id;
#endif
  VALGRIND_DISCARD (VALGRIND_MAKE_MEM_UNDEFINED (header, size));

  return (void *)(header);
}

/* Puts PTR back on POOL's free list.  */
template <typename TBlockAllocator>
inline void
base_pool_allocator <TBlockAllocator>::remove (void *object)
{
  gcc_checking_assert (m_initialized);

  allocation_pool_list *header;
  int size ATTRIBUTE_UNUSED;
  size = m_elt_size - offsetof (allocation_object, u.data);

#ifdef ENABLE_CHECKING
  gcc_assert (object
	      /* Check if we free more than we allocated, which is Bad (TM).  */
	      && m_elts_free < m_elts_allocated
	      /* Check whether the PTR was allocated from POOL.  */
	      && m_id == allocation_object::get_instance (object)->id);

  memset (object, 0xaf, size);

  /* Mark the element to be free.  */
  allocation_object::get_instance (object)->id = 0;
#endif

  header = (allocation_pool_list*) object;
  header->next = m_returned_free_list;
  m_returned_free_list = header;
  VALGRIND_DISCARD (VALGRIND_MAKE_MEM_NOACCESS (object, size));
  m_elts_free++;

  if (GATHER_STATISTICS)
    {
      pool_allocator_usage.release_instance_overhead (this, m_elt_size);
    }
}

/* Number of elements currently active (not returned to pool).  Used for cheap
   consistency checks.  */
template <typename TBlockAllocator>
inline size_t
base_pool_allocator <TBlockAllocator>::num_elts_current ()
{
  return m_elts_allocated - m_elts_free;
}

/* Specialization of base_pool_allocator which should be used in most cases.
   Another specialization may be needed, if object size is greater than
   memory_block_pool::block_size (64 KB).  */
typedef base_pool_allocator <memory_block_pool> pool_allocator;
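
/* A minimal usage sketch (the pool name and element type below are made up
   for illustration).  pool_allocator hands back raw, correctly sized
   storage, so the caller performs any construction itself:

     static pool_allocator foo_pool ("foo objects", sizeof (struct foo));
     ...
     struct foo *f = (struct foo *) foo_pool.allocate ();
     ...
     foo_pool.remove (f);

   For objects with constructors and destructors, object_allocator below is
   usually more convenient.  */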

/* Type based memory pool allocator.  */
template <typename T>
class object_allocator
{
public:
  /* Default constructor for pool allocator called NAME.  */
  object_allocator (const char *name CXX_MEM_STAT_INFO):
    m_allocator (name, sizeof (T) PASS_MEM_STAT) {}

  inline void
  release ()
  {
    m_allocator.release ();
  }

  inline void release_if_empty ()
  {
    m_allocator.release_if_empty ();
  }

  inline T *
  allocate () ATTRIBUTE_MALLOC
  {
    return ::new (m_allocator.allocate ()) T ();
  }

  inline void
  remove (T *object)
  {
    /* Call destructor.  */
    object->~T ();

    m_allocator.remove (object);
  }

  inline size_t
  num_elts_current ()
  {
    return m_allocator.num_elts_current ();
  }

private:
  pool_allocator m_allocator;
};
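
/* A minimal usage sketch (the pool name and element type are made up for
   illustration).  allocate () default-constructs a T in pool storage and
   remove () runs its destructor before returning the slot to the free
   list:

     static object_allocator<expr_entry> expr_pool ("expr entries");
     ...
     expr_entry *e = expr_pool.allocate ();
     ...
     expr_pool.remove (e);  */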

/* Store information about each particular alloc_pool.  Note that this
   will underestimate the amount of storage used by a small amount:
   1) The overhead in a pool is not accounted for.
   2) The unallocated elements in a block are not accounted for.  Note
      that this can in the worst case be one element smaller than the block
      size for that pool.  */
struct alloc_pool_descriptor
{
  /* Number of pools allocated.  */
  unsigned long created;
  /* Gross allocated storage.  */
  unsigned long allocated;
  /* Amount of currently active storage.  */
  unsigned long current;
  /* Peak amount of storage used.  */
  unsigned long peak;
  /* Size of element in the pool.  */
  int elt_size;
};

/* Helper for classes that do not provide default ctor.  */

template <typename T>
inline void *
operator new (size_t, object_allocator<T> &a)
{
  return a.allocate ();
}

/* Hashtable mapping alloc_pool names to descriptors.  */
extern hash_map<const char *, alloc_pool_descriptor> *alloc_pool_hash;


#endif