author    | Tavian Barnes <tavianator@tavianator.com> | 2023-11-23 13:08:04 -0500
committer | Tavian Barnes <tavianator@tavianator.com> | 2023-11-23 13:56:03 -0500
commit    | ae18c20d5a585ae4bc1e9ee6859230fee7f73ed8 (patch)
tree      | 9b83e03091def0002360327a102080e1fd69e6a7 /src/alloc.h
parent    | f9f43fe44f4a013aac94d5787cf827ec04b4c861 (diff)
download  | bfs-ae18c20d5a585ae4bc1e9ee6859230fee7f73ed8.tar.xz
alloc: New helpers for aligned reallocation
There is no aligned_realloc(), so the new xrealloc() function emulates
it by manually reallocating and copying for over-aligned types. The new
REALLOC_ARRAY() and REALLOC_FLEX() macros wrap xrealloc().
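This patch only adds the declaration of xrealloc() to src/alloc.h; its definition lives in src/alloc.c, outside this diff. As a rough sketch of the emulation strategy the message describes (illustrative only, not the actual bfs implementation; the fallback to realloc(), the use of aligned_alloc(), and the size rounding are assumptions here):

```c
#include <stdalign.h>
#include <stddef.h>
#include <stdlib.h>
#include <string.h>

/*
 * Illustrative sketch only, not the bfs implementation: emulate an
 * aligned realloc() by falling back to realloc() for ordinary
 * alignments and copying into a fresh aligned allocation otherwise.
 */
void *xrealloc_sketch(void *ptr, size_t align, size_t old_size, size_t new_size) {
	if (align <= alignof(max_align_t)) {
		/* realloc() already guarantees fundamental alignment */
		return realloc(ptr, new_size);
	}

	/* aligned_alloc() requires a size that is a multiple of the alignment */
	size_t rounded = (new_size + align - 1) / align * align;
	void *copy = aligned_alloc(align, rounded);
	if (!copy) {
		return NULL;
	}

	/* Copy the surviving prefix into the new allocation, then free the old one */
	size_t min_size = old_size < new_size ? old_size : new_size;
	if (min_size > 0) {
		memcpy(copy, ptr, min_size);
	}

	free(ptr);
	return copy;
}
```

This is why the interface takes both old_size and new_size: unlike realloc(), an over-aligned reallocation cannot ask the allocator how big the old block was, so the caller must say how many bytes are worth copying.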
Diffstat (limited to 'src/alloc.h')
-rw-r--r-- | src/alloc.h | 24
1 file changed, 24 insertions, 0 deletions
diff --git a/src/alloc.h b/src/alloc.h
index 15e4983..a6dee99 100644
--- a/src/alloc.h
+++ b/src/alloc.h
@@ -156,6 +156,30 @@ void *zalloc(size_t align, size_t size);
 	(type *)zalloc(alignof(type), sizeof_flex(type, member, count))
 
 /**
+ * Alignment-aware realloc().
+ *
+ * @param ptr
+ *         The pointer to reallocate.
+ * @param align
+ *         The required alignment.
+ * @param old_size
+ *         The previous allocation size.
+ * @param new_size
+ *         The new allocation size.
+ * @return
+ *         The reallocated memory, or NULL on failure.
+ */
+void *xrealloc(void *ptr, size_t align, size_t old_size, size_t new_size);
+
+/** Reallocate memory for an array. */
+#define REALLOC_ARRAY(type, ptr, old_count, new_count) \
+	(type *)xrealloc((ptr), alignof(type), sizeof_array(type, old_count), sizeof_array(type, new_count))
+
+/** Reallocate memory for a flexible struct. */
+#define REALLOC_FLEX(type, member, ptr, old_count, new_count) \
+	(type *)xrealloc((ptr), alignof(type), sizeof_flex(type, member, old_count), sizeof_flex(type, member, new_count))
+
+/**
  * An arena allocator for fixed-size types.
  *
  * Arena allocators are intentionally not thread safe.
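To show how the new macros are meant to be used, here is a hypothetical growing-array helper; the struct and function are illustrative and not part of the patch, only REALLOC_ARRAY() itself comes from src/alloc.h:

```c
#include "alloc.h"

#include <stddef.h>

/* Hypothetical dynamic array; only REALLOC_ARRAY() comes from alloc.h. */
struct int_vec {
	int *data;
	size_t count;
	size_t capacity;
};

/* Grow (or shrink) the backing array to hold new_capacity elements. */
static int int_vec_resize(struct int_vec *vec, size_t new_capacity) {
	int *data = REALLOC_ARRAY(int, vec->data, vec->capacity, new_capacity);
	if (!data) {
		return -1;
	}

	vec->data = data;
	vec->capacity = new_capacity;
	return 0;
}
```

REALLOC_FLEX() follows the same pattern for structs ending in a flexible array member, taking the member name so that sizeof_flex() can compute the old and new allocation sizes.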