 src/alloc.c   | 65 +++++++++++++++++++++++++++++++++
 src/alloc.h   | 40 ++++++++++++++++++
 tests/alloc.c | 61 ++++++++++++++++++++++++++++
 3 files changed, 166 insertions(+), 0 deletions(-)
diff --git a/src/alloc.c b/src/alloc.c
index d3923b2..9bc7c43 100644
--- a/src/alloc.c
+++ b/src/alloc.c
@@ -286,6 +286,17 @@ static size_t fenwick_node(const struct slab *slab, size_t i) {
	return ret;
}

+/** Query the Fenwick tree. Returns sum(bits[0:N*i]). */
+static size_t fenwick_query(const struct slab *slab, size_t i) {
+	// (i & -i) isolates the least-significant bit in i.
+	// https://en.wikipedia.org/wiki/Fenwick_tree#The_interrogation_tree
+	size_t ret = 0;
+	for (; i > 0; i -= i & -i) {
+		ret += fenwick_node(slab, i);
+	}
+	return ret;
+}
+
/** Update the Fenwick tree. */
static void fenwick_update(struct slab *slab, size_t i, ptrdiff_t delta) {
	size_t length = slab_length(slab);
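
The interrogation-tree walk in fenwick_query() above is easier to follow on a
plain array. Below is a minimal standalone sketch, assuming a hypothetical
1-based tree[] where tree[i] stores the sum of the (i & -i) elements ending at
index i (the slab-backed fenwick_node() above plays the role of tree[i]):

    #include <assert.h>
    #include <stddef.h>

    static size_t fenwick_sum(const size_t *tree, size_t i) {
        size_t ret = 0;
        // Clearing the lowest set bit each step climbs one level of the
        // interrogation tree, so the loop runs O(log i) times
        for (; i > 0; i -= i & -i) {
            ret += tree[i];
        }
        return ret;
    }

    int main(void) {
        // Fenwick encoding of {1, 0, 1, 1}: tree[1] = 1, tree[2] = 1
        // (covers elements 1..2), tree[3] = 1, tree[4] = 3 (covers 1..4)
        size_t tree[] = {0, 1, 1, 1, 3};
        assert(fenwick_sum(tree, 4) == 3);
        assert(fenwick_sum(tree, 2) == 1);
        return 0;
    }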
@@ -321,6 +332,25 @@ static size_t fenwick_search_free(struct slab *slab) {
	return low;
}

+/** Binary search the Fenwick tree for a specific rank. */
+static size_t fenwick_search_next(struct slab *slab, size_t n) {
+	size_t low = 0;
+	size_t bit = slab_length(slab) + 1;
+	bfs_assert(has_single_bit(bit));
+
+	do {
+		bit >>= 1;
+		size_t mid = low + bit;
+		size_t node = fenwick_node(slab, mid);
+		if (node <= n) {
+			low = mid;
+			n -= node;
+		}
+	} while (bit > 1);
+
+	return low;
+}
+
/** Get the chunk for a bitmap index. */
static void *nth_chunk(struct slab *slab, size_t i, size_t j) {
	bfs_assert(i < slab_length(slab));
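
fenwick_search_next() above is the classic top-down rank descent: starting
from the root's power-of-two span, it takes the right child whenever that
subtree's sum fits within the remaining rank n. A standalone sketch over the
same hypothetical 1-based tree[] as above (length + 1 must be a power of two,
as the bfs_assert() requires):

    #include <assert.h>
    #include <stddef.h>

    // Returns the largest index low with sum(tree[1..low]) <= n, i.e.
    // the last position whose prefix sum has not yet exceeded n
    static size_t fenwick_rank(const size_t *tree, size_t length, size_t n) {
        size_t low = 0;
        size_t bit = length + 1; // must be a power of two

        do {
            bit >>= 1;
            size_t mid = low + bit;
            size_t node = tree[mid];
            if (node <= n) {
                // The whole subtree at mid fits, descend to the right
                low = mid;
                n -= node;
            }
        } while (bit > 1);

        return low;
    }

    int main(void) {
        // Fenwick encoding of {1, 0, 1, 1, 0, 0, 1};
        // prefix sums are 1, 1, 2, 3, 3, 3, 4
        size_t tree[] = {0, 1, 1, 1, 3, 0, 0, 1};
        assert(fenwick_rank(tree, 7, 2) == 3); // prefix(3) == 2, prefix(4) == 3
        assert(fenwick_rank(tree, 7, 0) == 0); // prefix(1) == 1 already exceeds 0
        return 0;
    }

In slab_next() below, n is the popcount of the bitmap words up to and
including the current one, so the descent lands on the first later word that
still contains a set bit (or runs off the end, which the i >= length check
catches).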
@@ -367,6 +397,41 @@ static void slab_free(struct slab *slab, void *ptr) {
	poison(ptr, slab_size(slab));
}

+void *slab_next(struct slab *slab, void *ptr) {
+	// Find an allocated chunk after the given pointer
+	size_t min = 0;
+	if (ptr) {
+		min = 1 + chunk_index(slab, ptr);
+	}
+
+	size_t i = min / SIZE_WIDTH;
+	size_t j = min % SIZE_WIDTH;
+	size_t length = slab_length(slab);
+	if (i >= length) {
+		return NULL;
+	}
+
+	// Check for a 1 at bit j or higher
+	size_t word = bitmap_word(slab, i);
+	size_t bit = (size_t)1 << j;
+	j = trailing_zeros(word & -bit);
+
+	if (j >= SIZE_WIDTH) {
+		// None in the same word, query the Fenwick tree
+		size_t rank = fenwick_query(slab, i + 1);
+		i = fenwick_search_next(slab, rank);
+		if (i >= length) {
+			return NULL;
+		}
+
+		word = bitmap_word(slab, i);
+		j = trailing_zeros(word);
+		bfs_assert(j < SIZE_WIDTH);
+	}
+
+	return nth_chunk(slab, i, j);
+}
+
/** Free a whole slab. */
static void slab_destroy(struct slab *slab) {
	unpoison(&slab->chunks);
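
A note on the bit trick in slab_next(): for bit == (size_t)1 << j, the
negation -bit sets every bit at position j and above, so word & -bit discards
the bits below j, and trailing_zeros() then finds the first set bit at or
above j. A small sketch, with ctz() as a hypothetical portable stand-in for
bfs's trailing_zeros() (which returns the word width for a zero input):

    #include <assert.h>
    #include <limits.h>

    // Hypothetical stand-in for trailing_zeros(): the index of the
    // lowest set bit, or the full word width if no bit is set
    static unsigned int ctz(unsigned long long word) {
        if (word == 0) {
            return sizeof(word) * CHAR_BIT;
        }

        unsigned int n = 0;
        while (!(word & 1)) {
            word >>= 1;
            ++n;
        }
        return n;
    }

    int main(void) {
        unsigned long long word = 0x90; // bits 4 and 7 set
        unsigned long long bit = 1ULL << 5;
        // -bit sets bits 5 and up, so bit 4 is masked off
        assert(ctz(word & -bit) == 7);
        bit = 1ULL << 3;
        assert(ctz(word & -bit) == 4); // first set bit at or above 3
        return 0;
    }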
diff --git a/src/alloc.h b/src/alloc.h
index af8b391..4d68111 100644
--- a/src/alloc.h
+++ b/src/alloc.h
@@ -284,6 +284,29 @@ void arena_clear(struct arena *arena);
void arena_destroy(struct arena *arena);
/**
+ * Loop over every allocated object in an arena.
+ *
+ * @type
+ *         The object type.
+ * @item
+ *         The induction variable name.
+ * @arena
+ *         The arena to loop over.
+ */
+#define for_arena(type, item, arena) \
+	for_arena_ (type, _i_##item, item, (arena))
+
+#define for_arena_(type, i, item, arena) \
+	for (size_t i = 0; i < arena->nslabs; ++i) \
+		for (type *item = NULL; (item = slab_next(arena->slabs[i], item));)
+
+/**
+ * @internal
+ * Get the next allocated object in a slab.
+ */
+void *slab_next(struct slab *slab, void *ptr);
+
+/**
 * An arena allocator for flexibly-sized types.
 */
struct varena {
@@ -392,4 +415,21 @@ void varena_clear(struct varena *varena);
 */
void varena_destroy(struct varena *varena);

+/**
+ * Loop over every allocated object in a varena.
+ *
+ * @type
+ *         The object type.
+ * @item
+ *         The induction variable name.
+ * @varena
+ *         The varena to loop over.
+ */
+#define for_varena(type, item, varena) \
+	for_varena_ (type, _j_##item, item, (varena))
+
+#define for_varena_(type, j, item, varena) \
+	for (size_t j = 0; j < varena->narenas; ++j) \
+		for_arena (type, item, &varena->arenas[j])
+
#endif // BFS_ALLOC_H
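
For illustration, a hypothetical caller of the new macro (struct point and
example() are invented for this sketch; ARENA_INIT(), arena_alloc(), and
arena_destroy() are the existing arena API used by the tests below):

    #include "alloc.h"

    struct point {
        int x, y;
    };

    void example(void) {
        struct arena arena;
        ARENA_INIT(&arena, struct point);

        struct point *p = arena_alloc(&arena);
        if (p) {
            p->x = 1;
            p->y = 2;
        }

        // Visits every live object, skipping freed chunks
        for_arena (struct point, item, &arena) {
            item->x += item->y;
        }

        arena_destroy(&arena);
    }

for_varena() works the same way over each arena in a varena. The two-level
expansion gives the outer loop's index a mangled name (_i_##item) so it won't
collide with identifiers in the loop body.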
diff --git a/tests/alloc.c b/tests/alloc.c
index 5eb4e9e..defdac2 100644
--- a/tests/alloc.c
+++ b/tests/alloc.c
@@ -4,12 +4,70 @@
#include "tests.h"
#include "alloc.h"
+#include "bit.h"
#include "diag.h"
#include <errno.h>
#include <stdlib.h>
#include <stdint.h>
+/** Check for_arena() iteration. */
+static void check_for_arena(void) {
+	// Check all 2^bits patterns of allocated/free objects. Every 2 bits
+	// of the pattern selects the allocation layout of one word's worth
+	// of chunks:
+	//
+	//     0b00: 000...000
+	//     0b01: 100...000
+	//     0b10: 011...111
+	//     0b11: 111...111
+	const size_t bits = 8;
+	const size_t patterns = 1 << bits;
+	const size_t chunk = SIZE_WIDTH;
+	const size_t count = chunk * bits;
+
+	int **ptrs = ALLOC_ARRAY(int *, count);
+	bfs_everify(ptrs);
+
+	struct arena arena;
+	ARENA_INIT(&arena, int);
+
+	for (size_t mask = 0; mask < patterns; ++mask) {
+		arena_clear(&arena);
+
+		// Allocate count objects
+		for (size_t i = 0; i < count; ++i) {
+			ptrs[i] = arena_alloc(&arena);
+			bfs_everify(ptrs[i]);
+			*ptrs[i] = i;
+		}
+
+		// Create holes according to the mask
+		size_t remaining = count;
+		for (size_t bit = 0; bit < bits; bit += 2) {
+			size_t start = chunk * bit / 2;
+			size_t end = start + chunk;
+			for (size_t i = start; i < end; ++i) {
+				bool keep = (mask >> bit) & (i == start ? 0x1 : 0x2);
+				if (!keep) {
+					arena_free(&arena, ptrs[i]);
+					ptrs[i] = NULL;
+					--remaining;
+				}
+			}
+		}
+
+		// Check the remaining objects
+		for_arena (int, p, &arena) {
+			bfs_check(ptrs[*p] == p);
+			--remaining;
+		}
+		bfs_check(remaining == 0);
+	}
+
+	arena_destroy(&arena);
+	free(ptrs);
+}
+
struct flexible {
	alignas(64) int foo[8];
	int bar[];
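
As a worked example of the masking scheme above: mask = 0b0110 gives the first
bitmap word the pattern 0b10 (only its first chunk freed, i.e. 011...111) and
the second word the pattern 0b01 (only its first chunk kept, i.e. 100...000);
the remaining pattern bits are zero, so those words are freed entirely.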
@@ -54,6 +112,9 @@ void check_alloc(void) {
	bfs_check(ALLOC_FLEX(struct flexible, bar, too_many) == NULL && errno == EOVERFLOW);
	bfs_check(ZALLOC_FLEX(struct flexible, bar, too_many) == NULL && errno == EOVERFLOW);

+	// arena tests
+	check_for_arena();
+
	// varena tests
	struct varena varena;
	VARENA_INIT(&varena, struct flexible, bar);