xref: /6.0.3/couchstore/src/arena.cc (revision 13d4a251)
1 //
2 //  arena.c
3 //  couchstore
4 //
5 //  Created by Jens Alfke on 4/13/12.
6 //  Copyright (c) 2012 Couchbase, Inc. All rights reserved.
7 //
8 #include "config.h"
9 #include "arena.h"
10 #include <stdlib.h>
11 #include <string.h>
12 #include <platform/cb_malloc.h>
13 #include <stdint.h>
14 #include <stdio.h>
15 #include <platform/cbassert.h>
16 
17 #define ALIGNMENT 4                 // Byte alignment of blocks; must be a power of 2
18 #define PAGE_SIZE 4096              // Chunk allocation will be rounded to a multiple of this
19 #define DEFAULT_CHUNK_SIZE 32768    // Used if 0 is passed to new_arena
20 #define LOG_STATS 0                 // Set to 1 to log info about allocations when arenas are freed
21 
// Header prepended to every malloc'ed chunk. Chunks form a singly-linked
// list (newest first) threaded through prev_chunk; the chunk's usable
// bytes follow this header directly in memory.
typedef struct arena_chunk {
    struct arena_chunk* prev_chunk; // Link to previous chunk
    size_t size;                    // Size of available bytes after this header
} arena_chunk;
26 
27 
// The arena itself: a bump allocator over the linked list of chunks.
// Allocation advances next_block through the current chunk; a new chunk
// is malloc'ed when the current one runs out.
struct arena {
    char* next_block;           // Next block to be allocated in cur_chunk (if there's room)
    char* end;                  // End of the current chunk; can't allocate past here
    arena_chunk* cur_chunk;     // The current (newest) chunk
    size_t chunk_size;          // The size of chunks to allocate, as passed to new_arena
#ifdef DEBUG
    int blocks_allocated;       // Number of blocks allocated
    size_t bytes_allocated;     // Number of bytes allocated
#endif
};
38 
39 
40 // Returns a pointer to the first byte of a chunk's contents.
chunk_start(arena_chunk * chunk)41 static inline void* chunk_start(arena_chunk* chunk)
42 {
43     return (char*)chunk + sizeof(arena_chunk);
44 }
45 
46 // Returns a pointer to the byte past the end of a chunk's contents.
chunk_end(arena_chunk * chunk)47 static inline void* chunk_end(arena_chunk* chunk)
48 {
49     return (char*)chunk_start(chunk) + chunk->size;
50 }
51 
52 // Allocates a new chunk, attaches it to the arena, and allocates 'size' bytes from it.
add_chunk(arena * a,size_t size)53 static void* add_chunk(arena* a, size_t size)
54 {
55     size_t chunk_size = a->chunk_size;
56     if (size > chunk_size) {
57         chunk_size = size;  // make sure the new chunk is big enough to fit 'size' bytes
58     }
59     arena_chunk* chunk;
60     chunk = static_cast<arena_chunk*>(cb_malloc(sizeof(arena_chunk) + chunk_size));
61     if (!chunk) {
62         return NULL;
63     }
64     chunk->prev_chunk = a->cur_chunk;
65     chunk->size = chunk_size;
66 
67     void* result = chunk_start(chunk);
68     a->next_block = (char*)result + size;
69     a->end = static_cast<char*>(chunk_end(chunk));
70     a->cur_chunk = chunk;
71     return result;
72 }
73 
74 
new_arena(size_t chunk_size)75 arena* new_arena(size_t chunk_size)
76 {
77     arena* a = static_cast<arena*>(cb_calloc(1, sizeof(arena)));
78     if (a) {
79         if (chunk_size == 0) {
80             chunk_size = DEFAULT_CHUNK_SIZE;
81         } else {
82             chunk_size += sizeof(arena_chunk);
83             chunk_size = (chunk_size + PAGE_SIZE) & ~(PAGE_SIZE - 1);   // round up to multiple
84         }
85         a->chunk_size = chunk_size - sizeof(arena_chunk);
86     }
87     return a;
88 }
89 
delete_arena(arena * a)90 void delete_arena(arena* a)
91 {
92 #ifdef DEBUG
93     size_t total_allocated = 0;
94 #endif
95     arena_chunk* chunk = a->cur_chunk;
96     while (chunk) {
97 #ifdef DEBUG
98         total_allocated += chunk->size;
99 #endif
100         void* to_free = chunk;
101         chunk = chunk->prev_chunk;
102         cb_free(to_free);
103     }
104 #if LOG_STATS
105     fprintf(stderr, "delete_arena: %zd bytes malloced for %zd bytes of data in %d blocks (%.0f%%)\n",
106             total_allocated, a->bytes_allocated, a->blocks_allocated,
107             a->bytes_allocated*100.0/total_allocated);
108 #endif
109     cb_free(a);
110 }
111 
arena_alloc_unaligned(arena * a,size_t size)112 void* arena_alloc_unaligned(arena* a, size_t size)
113 {
114     void* result = a->next_block;
115     a->next_block += size;
116     if (a->next_block > a->end || size > a->chunk_size) {
117         result = add_chunk(a, size);
118     }
119 #ifdef DEBUG
120     if (result) {
121         ++a->blocks_allocated;
122         a->bytes_allocated += size;
123     }
124 #endif
125     return result;
126 }
127 
arena_alloc(arena * a,size_t size)128 void* arena_alloc(arena* a, size_t size)
129 {
130     int padding = ((intptr_t)a->next_block & (ALIGNMENT - 1));
131     if (padding == 0) {
132         return arena_alloc_unaligned(a, size);
133     }
134     padding = ALIGNMENT - padding;
135     return (char*)arena_alloc_unaligned(a, size + padding) + padding;
136 }
137 
arena_free(arena * a,void * block)138 void arena_free(arena* a, void* block)
139 {
140 #ifdef DEBUG
141     if (block) {
142         arena_chunk* chunk;
143         for (chunk = a->cur_chunk; chunk; chunk = chunk->prev_chunk) {
144             if (block >= chunk_start(chunk) && block < chunk_end(chunk)) {
145                 break;
146             }
147         }
148         cb_assert(chunk);
149         cb_assert(a->blocks_allocated > 0);
150         --a->blocks_allocated;
151     }
152 #else
153     (void)a;
154     (void)block;
155 #endif
156 }
157 
arena_mark(arena * a)158 const arena_position* arena_mark(arena *a)
159 {
160     return (const arena_position*) a->next_block;
161 }
162 
arena_free_from_mark(arena * a,const arena_position * mark)163 void arena_free_from_mark(arena *a, const arena_position *mark)
164 {
165     arena_chunk* chunk = a->cur_chunk;
166     while (chunk && ((void*)mark < chunk_start(chunk) || (void*)mark > chunk_end(chunk))) {
167         a->cur_chunk = chunk->prev_chunk;
168         cb_free(chunk);
169         chunk = a->cur_chunk;
170     }
171     cb_assert(chunk != NULL || mark == NULL);   // If this fails, mark was bogus
172 
173     a->next_block = static_cast<char*>((void*)mark);
174     a->end = static_cast<char*>(chunk ? chunk_end(chunk) : NULL);
175 #ifdef DEBUG
176     memset(a->next_block, 0x55, (char*)a->end - (char*)a->next_block);
177     // TODO: Somehow roll back blocks_allocated and bytes_allocated (how?)
178 #endif
179 }
180 
// Frees every allocation in the arena at once (all chunks are released),
// leaving the arena itself valid and ready for reuse.
void arena_free_all(arena *a)
{
    arena_free_from_mark(a, NULL);
}
185