toys

toys.git
git clone git://git.lenczewski.org/toys.git

allocators.h (11042B)


#ifndef ALLOCATORS_H
#define ALLOCATORS_H

#include <stdalign.h>
#include <stddef.h>
#include <stdint.h>

#include "assert.h"
#include "utils.h"

/* arena allocator
 * ---------------------------------------------------------------------------
 *  Linear/bump allocator. Supports allocations of arbitrary size and with
 *  power-of-two alignments. Does not support freeing individual allocations.
 */
struct arena_allocator {
	void *ptr;
	size_t cap, len;
};

inline void
arena_reset(struct arena_allocator *allocator)
{
	allocator->len = 0;
}

inline void *
arena_alloc(struct arena_allocator *allocator, size_t size, size_t align)
{
	ASSERT(size);
	ASSERT(IS_POW2(align));

	uintptr_t base = (uintptr_t) allocator->ptr;
	uintptr_t end = base + allocator->cap;

	uintptr_t aligned_ptr = ALIGN_NEXT(base + allocator->len, align);
	if (end < aligned_ptr + size)
		return NULL;

	allocator->len = (aligned_ptr + size) - base;

	return (void *) aligned_ptr;
}

#define ARENA_ALLOC_ARRAY(arena, T, n) \
	((T *) arena_alloc((arena), sizeof(T) * (n), alignof(T)))

#define ARENA_ALLOC_SIZED(arena, T) \
	ARENA_ALLOC_ARRAY((arena), T, 1)

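/* Usage sketch (illustrative only; `arena_example` is not part of this
 * header's API). Assumes `buf` is suitably aligned for the types being
 * allocated out of it.
 */
static inline void
arena_example(void *buf, size_t cap)
{
	struct arena_allocator arena = { .ptr = buf, .cap = cap, .len = 0 };

	/* allocations are bumped out of `buf`; NULL signals exhaustion */
	int *xs = ARENA_ALLOC_ARRAY(&arena, int, 16);
	double *d = ARENA_ALLOC_SIZED(&arena, double);
	(void) xs;
	(void) d;

	/* individual allocations cannot be freed; resetting reclaims everything */
	arena_reset(&arena);
}
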
/* stack allocator
 * ---------------------------------------------------------------------------
 *  A LIFO allocator. Supports allocations of arbitrary size but with limited
 *  power-of-two alignments. Supports freeing individual allocations: freeing
 *  an older allocation also frees every more recent allocation. A newer
 *  allocation must never be freed after an older one has already been freed
 *  (i.e. no alloc1-alloc2-free1-alloc3-free2-...).
 */

struct stack_allocator_header {
	uint8_t padding;
};

#define STACK_ALLOCATOR_MAX_ALIGNMENT \
	(2 << ((8 * sizeof(struct stack_allocator_header)) - 1))
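
/* The padding recorded for each allocation (header byte included) must fit in
 * the header's single uint8_t, so requested alignments must stay strictly
 * below 2^(8 * sizeof(struct stack_allocator_header)) == 256.
 */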

struct stack_allocator {
	void *ptr;
	size_t cap, len;
};

inline void
stack_reset(struct stack_allocator *allocator)
{
	allocator->len = 0;
}

inline void *
stack_alloc(struct stack_allocator *allocator, size_t size, size_t align)
{
	ASSERT(size);
	ASSERT(IS_POW2(align));
	ASSERT(align < STACK_ALLOCATOR_MAX_ALIGNMENT);

	uintptr_t base = (uintptr_t) allocator->ptr;
	uintptr_t end = base + allocator->cap;

	struct stack_allocator_header *hdr;
	uintptr_t unaligned_ptr = base + allocator->len;
	uintptr_t aligned_ptr = ALIGN_NEXT(unaligned_ptr + sizeof *hdr, align);
	if (end < aligned_ptr + size)
		return NULL;

	hdr = (void *) (aligned_ptr - sizeof *hdr);
	hdr->padding = aligned_ptr - unaligned_ptr;

	allocator->len = (aligned_ptr + size) - base;

	return (void *) aligned_ptr;
}

inline size_t
stack_alloc_padding(void *ptr)
{
	struct stack_allocator_header *hdr = (void *) (((uintptr_t) ptr) - sizeof *hdr);
	return hdr->padding;
}

inline void
stack_free(struct stack_allocator *allocator, void *res)
{
	uintptr_t base = (uintptr_t) allocator->ptr, ptr = (uintptr_t) res;
	allocator->len = (ptr - stack_alloc_padding(res)) - base;
}

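/* Usage sketch (illustrative only; `stack_example` is not part of this
 * header's API). Frees must happen in LIFO order relative to the
 * allocations they release.
 */
static inline void
stack_example(void *buf, size_t cap)
{
	struct stack_allocator stack = { .ptr = buf, .cap = cap, .len = 0 };

	int *a = stack_alloc(&stack, 16 * sizeof *a, alignof(int));
	if (!a)
		return;

	float *b = stack_alloc(&stack, 8 * sizeof *b, alignof(float));
	(void) b;

	/* freeing `a` also releases `b`, since `b` was allocated after `a` */
	stack_free(&stack, a);
}
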
/* pool allocator
 * ---------------------------------------------------------------------------
 *  A block-based allocator. Supports allocating out of a pool of memory
 *  blocks, all of a fixed power-of-two size, with an alignment determined by
 *  the alignment of the main memory buffer and the size of each block.
 *  Supports freeing individual allocations (i.e. returning blocks to the
 *  pool).
 */

struct pool_allocator_freeblock {
	struct pool_allocator_freeblock *next;
};

struct pool_allocator {
	void *ptr;
	size_t cap;

	size_t block_size;
	struct pool_allocator_freeblock freelist;
};

inline void
pool_allocator_init(struct pool_allocator *allocator, size_t block_size)
{
	ASSERT(IS_POW2(block_size));
	ASSERT(sizeof(struct pool_allocator_freeblock) <= block_size);

	allocator->block_size = block_size;

	/* ptr and cap must already describe the backing buffer; carve it into
	 * block_size-sized blocks and chain them into the freelist, skipping
	 * any trailing partial block
	 */
	uintptr_t base = (uintptr_t) allocator->ptr;
	struct pool_allocator_freeblock **tail = &allocator->freelist.next;
	for (size_t off = 0; off + block_size <= allocator->cap; off += block_size) {
		struct pool_allocator_freeblock *block = (void *) (base + off);

		*tail = block;
		tail = &block->next;
	}
	*tail = NULL;
}

inline void *
pool_alloc(struct pool_allocator *allocator, size_t size, size_t align)
{
	ASSERT(size);
	ASSERT(IS_POW2(align));

	if (allocator->block_size < size)
		return NULL;

	struct pool_allocator_freeblock *block = allocator->freelist.next;
	if (!block)
		return NULL;

	if (!IS_ALIGNED((uintptr_t) block, align))
		return NULL;

	allocator->freelist.next = block->next;

	return block;
}

inline void
pool_free(struct pool_allocator *allocator, void *res)
{
	struct pool_allocator_freeblock *block = res;
	block->next = allocator->freelist.next;

	allocator->freelist.next = block;
}

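/* Usage sketch (illustrative only; `pool_example` is not part of this
 * header's API). The caller sets `ptr` and `cap` before init builds the
 * freelist; `buf` is assumed to be at least 16-byte aligned here so the
 * requested alignment can be satisfied.
 */
static inline void
pool_example(void *buf, size_t cap)
{
	struct pool_allocator pool = { .ptr = buf, .cap = cap };
	pool_allocator_init(&pool, 64);

	void *block = pool_alloc(&pool, 48, 16);
	if (!block)
		return;

	/* freed blocks are pushed back onto the head of the freelist */
	pool_free(&pool, block);
}
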
/* freelist-based heap allocator
 * ---------------------------------------------------------------------------
 *  A general-purpose allocator. Supports allocations of arbitrary size, and
 *  with power-of-two alignments. Supports unordered frees. Suffers from
 *  internal fragmentation.
 */

struct freelist_allocator_header {
	size_t padding;
	size_t size;
};

struct freelist_allocator_block {
	struct freelist_allocator_block *prev, *next;
	size_t size;
};

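/* The freelist is a circular, doubly-linked list threaded through the free
 * blocks themselves, with the embedded `freelist` member of the allocator
 * acting as a sentinel node.
 */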
inline void
freelist_allocator_freelist_insert(struct freelist_allocator_block *block,
				   struct freelist_allocator_block *prev,
				   struct freelist_allocator_block *next)
{
	prev->next = block;
	block->prev = prev;
	next->prev = block;
	block->next = next;
}

inline void
freelist_allocator_freelist_remove(struct freelist_allocator_block *block)
{
	block->prev->next = block->next;
	block->next->prev = block->prev;
}

struct freelist_allocator {
	void *ptr;
	size_t cap;

	struct freelist_allocator_block freelist;
};

inline void
freelist_allocator_init(struct freelist_allocator *allocator)
{
	ASSERT(IS_ALIGNED((uintptr_t) allocator->ptr,
			  alignof(struct freelist_allocator_block)));

	allocator->freelist.prev = allocator->freelist.next = &allocator->freelist;

	struct freelist_allocator_block *root = allocator->ptr;
	root->size = allocator->cap;

	freelist_allocator_freelist_insert(root,
					   allocator->freelist.prev,
					   allocator->freelist.next);
}

inline void *
freelist_alloc(struct freelist_allocator *allocator, size_t size, size_t align)
{
	ASSERT(size);
	ASSERT(IS_POW2(align));

	/* ensure that the allocation header can be placed immediately in
	 * front of the main allocation
	 */
	if (align < alignof(struct freelist_allocator_header))
		align = alignof(struct freelist_allocator_header);

	struct freelist_allocator_header *hdr;

	struct freelist_allocator_block *freelist = &allocator->freelist, *block;
	for (block = freelist->next; block != freelist; block = block->next) {
		uintptr_t block_ptr = (uintptr_t) block;
		uintptr_t block_end = block_ptr + block->size;

		uintptr_t aligned_ptr = ALIGN_NEXT(block_ptr + sizeof *hdr, align);
		if (block_end < aligned_ptr + size)
			continue;

		/* carve a new free block out of any remaining space */
		uintptr_t new_block_ptr = ALIGN_NEXT(aligned_ptr + size,
						     alignof(struct freelist_allocator_block));
		if (new_block_ptr + sizeof *block < block_end) {
			struct freelist_allocator_block *new_block = (void *) new_block_ptr;
			new_block->size = block_end - new_block_ptr;

			/* replace old block with new block in freelist */
			freelist_allocator_freelist_insert(new_block,
							   block->prev,
							   block->next);

			block_end = new_block_ptr;
		} else {
			/* cannot fit new block, snip old block from freelist */
			freelist_allocator_freelist_remove(block);
		}

		/* write out header and return aligned allocation */
		uintptr_t header_ptr = aligned_ptr - sizeof *hdr;
		ASSERT(IS_ALIGNED(header_ptr, alignof(struct freelist_allocator_header)));

		hdr = (void *) header_ptr;
		hdr->padding = aligned_ptr - block_ptr;
		hdr->size = block_end - aligned_ptr;

		return (void *) aligned_ptr;
	}

	return NULL;
}

inline void
freelist_free(struct freelist_allocator *allocator, void *res)
{
	uintptr_t ptr = (uintptr_t) res;

	struct freelist_allocator_header *hdr = (void *) (ptr - sizeof *hdr);
	uintptr_t block_ptr = ptr - hdr->padding;
	uintptr_t block_end = ptr + hdr->size;

	struct freelist_allocator_block *block = (void *) block_ptr;
	block->size = block_end - block_ptr;

	/* find the blocks in the freelist immediately before and after the new block */
	struct freelist_allocator_block *freelist = &allocator->freelist, *next, *prev;
	for (next = freelist->next; next != freelist; next = next->next) {
		if (block < next)
			break;
	}

	prev = next->prev;

	freelist_allocator_freelist_insert(block, prev, next);

	/* coalesce blocks if possible */
	if (next != freelist && (uintptr_t) next == block_end) {
		block->size += next->size;
		freelist_allocator_freelist_remove(next);
	}

	if (prev != freelist && (uintptr_t) prev + prev->size == block_ptr) {
		prev->size += block->size;
		freelist_allocator_freelist_remove(block);
	}
}

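/* Usage sketch (illustrative only; `freelist_example` is not part of this
 * header's API). Assumes `buf` is aligned for struct
 * freelist_allocator_block and large enough for both allocations.
 */
static inline void
freelist_example(void *buf, size_t cap)
{
	struct freelist_allocator heap = { .ptr = buf, .cap = cap };
	freelist_allocator_init(&heap);

	void *a = freelist_alloc(&heap, 128, 16);
	void *b = freelist_alloc(&heap, 256, 32);
	if (!a || !b)
		return;

	/* frees may happen in any order; adjacent free blocks are coalesced */
	freelist_free(&heap, b);
	freelist_free(&heap, a);
}
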
/* rb-tree-based heap allocator
 * ---------------------------------------------------------------------------
 *  A general-purpose allocator. Supports allocations of arbitrary size, and
 *  with power-of-two alignments. Supports unordered frees. Suffers less from
 *  internal fragmentation than a freelist implementation. Has better average
 *  time complexity than a freelist implementation.
 */

struct rbtree_allocator {
	void *ptr;
	size_t cap;
};

/* buddy allocator
 * ---------------------------------------------------------------------------
 *  A block-based allocator. Supports allocating out of a tree of memory blocks,
 *  each sized as a power-of-two multiple of a base block size. Supports
 *  coalescing of blocks on free to minimise external fragmentation.
 */

struct buddy_allocator {
	void *ptr;
	size_t cap;
};

#endif /* ALLOCATORS_H */

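/* Implementation emission: the functions above are defined `inline` in this
 * header; defining HEADER_IMPL in exactly one translation unit before
 * including it makes the `extern inline` declarations below emit the single
 * set of external definitions required by C99 inline semantics, e.g.
 * (illustrative):
 *
 *	// allocators.c
 *	#define HEADER_IMPL
 *	#include "allocators.h"
 */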
#ifdef HEADER_IMPL

extern inline void
arena_reset(struct arena_allocator *allocator);

extern inline void *
arena_alloc(struct arena_allocator *allocator, size_t size, size_t align);

extern inline void
stack_reset(struct stack_allocator *allocator);

extern inline void *
stack_alloc(struct stack_allocator *allocator, size_t size, size_t align);

extern inline size_t
stack_alloc_padding(void *res);

extern inline void
stack_free(struct stack_allocator *allocator, void *res);

extern inline void
pool_allocator_init(struct pool_allocator *allocator, size_t block_size);

extern inline void *
pool_alloc(struct pool_allocator *allocator, size_t size, size_t align);

extern inline void
pool_free(struct pool_allocator *allocator, void *res);

extern inline void
freelist_allocator_freelist_insert(struct freelist_allocator_block *block,
				   struct freelist_allocator_block *prev,
				   struct freelist_allocator_block *next);

extern inline void
freelist_allocator_freelist_remove(struct freelist_allocator_block *block);

extern inline void
freelist_allocator_init(struct freelist_allocator *allocator);

extern inline void *
freelist_alloc(struct freelist_allocator *allocator, size_t size, size_t align);

extern inline void
freelist_free(struct freelist_allocator *allocator, void *res);

#endif /* HEADER_IMPL */