1//
2//  arena.c
3//  couchstore
4//
5//  Created by Jens Alfke on 4/13/12.
6//  Copyright (c) 2012 Couchbase, Inc. All rights reserved.
7//
8#include "config.h"
9#include "arena.h"
10#include <stdlib.h>
11#include <string.h>
12#include <assert.h>
13#include <stdint.h>
14#include <stdio.h>
15
16#define ALIGNMENT 4                 // Byte alignment of blocks; must be a power of 2
17#define PAGE_SIZE 4096              // Chunk allocation will be rounded to a multiple of this
18#define DEFAULT_CHUNK_SIZE 32768    // Used if 0 is passed to new_arena
19#define LOG_STATS 0                 // Set to 1 to log info about allocations when arenas are freed
20
// Header of one malloc'd chunk. The chunk's usable bytes immediately follow
// this header in memory; chunks form a singly-linked list, newest first.
typedef struct arena_chunk {
    struct arena_chunk* prev_chunk; // Link to previous chunk
    size_t size;                    // Size of available bytes after this header
} arena_chunk;
25
26
// A bump-pointer arena: allocations are carved sequentially out of the
// current chunk, and a new chunk is malloc'd when the current one runs out.
// Individual blocks are never freed; the whole arena (or a suffix of it,
// via marks) is released at once.
struct arena {
    char* next_block;           // Next block to be allocated in cur_chunk (if there's room)
    char* end;                  // End of the current chunk; can't allocate past here
    arena_chunk* cur_chunk;     // The current chunk
    size_t chunk_size;          // The size of chunks to allocate, as passed to new_arena
#ifdef DEBUG
    int blocks_allocated;       // Number of blocks allocated
    size_t bytes_allocated;     // Number of bytes allocated
#endif
};
37
38
39// Returns a pointer to the first byte of a chunk's contents.
40static inline void* chunk_start(arena_chunk* chunk)
41{
42    return (char*)chunk + sizeof(arena_chunk);
43}
44
45// Returns a pointer to the byte past the end of a chunk's contents.
46static inline void* chunk_end(arena_chunk* chunk)
47{
48    return (char*)chunk_start(chunk) + chunk->size;
49}
50
51// Allocates a new chunk, attaches it to the arena, and allocates 'size' bytes from it.
52static void* add_chunk(arena* a, size_t size)
53{
54    size_t chunk_size = a->chunk_size;
55    if (size > chunk_size) {
56        chunk_size = size;  // make sure the new chunk is big enough to fit 'size' bytes
57    }
58    arena_chunk* chunk;
59    chunk = static_cast<arena_chunk*>(malloc(sizeof(arena_chunk) + chunk_size));
60    if (!chunk) {
61        return NULL;
62    }
63    chunk->prev_chunk = a->cur_chunk;
64    chunk->size = chunk_size;
65
66    void* result = chunk_start(chunk);
67    a->next_block = (char*)result + size;
68    a->end = static_cast<char*>(chunk_end(chunk));
69    a->cur_chunk = chunk;
70    return result;
71}
72
73
74arena* new_arena(size_t chunk_size)
75{
76    arena* a = static_cast<arena*>(calloc(1, sizeof(arena)));
77    if (a) {
78        if (chunk_size == 0) {
79            chunk_size = DEFAULT_CHUNK_SIZE;
80        } else {
81            chunk_size += sizeof(arena_chunk);
82            chunk_size = (chunk_size + PAGE_SIZE) & ~(PAGE_SIZE - 1);   // round up to multiple
83        }
84        a->chunk_size = chunk_size - sizeof(arena_chunk);
85    }
86    return a;
87}
88
89void delete_arena(arena* a)
90{
91#ifdef DEBUG
92    //assert(a->nblocks == 0);
93    size_t total_allocated = 0;
94#endif
95    arena_chunk* chunk = a->cur_chunk;
96    while (chunk) {
97#ifdef DEBUG
98        total_allocated += chunk->size;
99#endif
100        void* to_free = chunk;
101        chunk = chunk->prev_chunk;
102        free(to_free);
103    }
104#if LOG_STATS
105    fprintf(stderr, "delete_arena: %zd bytes malloced for %zd bytes of data in %d blocks (%.0f%%)\n",
106            total_allocated, a->bytes_allocated, a->blocks_allocated,
107            a->bytes_allocated*100.0/total_allocated);
108#endif
109    free(a);
110}
111
112void* arena_alloc_unaligned(arena* a, size_t size)
113{
114    void* result = a->next_block;
115    a->next_block += size;
116    if (a->next_block > a->end || size > a->chunk_size) {
117        result = add_chunk(a, size);
118    }
119#ifdef DEBUG
120    if (result) {
121        ++a->blocks_allocated;
122        a->bytes_allocated += size;
123    }
124#endif
125    return result;
126}
127
128void* arena_alloc(arena* a, size_t size)
129{
130    int padding = ((intptr_t)a->next_block & (ALIGNMENT - 1));
131    if (padding == 0) {
132        return arena_alloc_unaligned(a, size);
133    }
134    padding = ALIGNMENT - padding;
135    return (char*)arena_alloc_unaligned(a, size + padding) + padding;
136}
137
138void arena_free(arena* a, void* block)
139{
140#ifdef DEBUG
141    if (block) {
142        arena_chunk* chunk;
143        for (chunk = a->cur_chunk; chunk; chunk = chunk->prev_chunk) {
144            if (block >= chunk_start(chunk) && block < chunk_end(chunk)) {
145                break;
146            }
147        }
148        assert(chunk);
149        assert(a->blocks_allocated > 0);
150        --a->blocks_allocated;
151    }
152#else
153    (void)a;
154    (void)block;
155#endif
156}
157
158const arena_position* arena_mark(arena *a)
159{
160    return (const arena_position*) a->next_block;
161}
162
163void arena_free_from_mark(arena *a, const arena_position *mark)
164{
165    arena_chunk* chunk = a->cur_chunk;
166    while (chunk && ((void*)mark < chunk_start(chunk) || (void*)mark > chunk_end(chunk))) {
167        a->cur_chunk = chunk->prev_chunk;
168        free(chunk);
169        chunk = a->cur_chunk;
170    }
171    assert(chunk != NULL || mark == NULL);   // If this fails, mark was bogus
172
173    a->next_block = static_cast<char*>((void*)mark);
174    a->end = static_cast<char*>(chunk ? chunk_end(chunk) : NULL);
175#ifdef DEBUG
176    memset(a->next_block, 0x55, (char*)a->end - (char*)a->next_block);
177    // TODO: Somehow roll back blocks_allocated and bytes_allocated (how?)
178#endif
179}
180
// Frees all blocks in the arena (but keeps the arena itself usable):
// equivalent to rolling back to a mark taken before any allocation.
void arena_free_all(arena *a)
{
    arena_free_from_mark(a, NULL);
}
185