//===----------------------------------------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//

#include "fallback_malloc.h"

#include <__threading_support>
#ifndef _LIBCXXABI_HAS_NO_THREADS
#if defined(__ELF__) && defined(_LIBCXXABI_LINK_PTHREAD_LIB)
#pragma comment(lib, "pthread")
#endif
#endif

#include <stdlib.h> // for malloc, calloc, free
#include <string.h> // for memset
#include <new> // for std::__libcpp_aligned_{alloc,free}
#ifdef DEBUG_FALLBACK_MALLOC
#include <cstdio> // for std::printf, used only by the debug tracing below
#endif

// A small, simple heap manager based (loosely) on
// the startup heap manager from FreeBSD, optimized for space.
//
// Manages a fixed-size memory pool, supports malloc and free only.
// No support for realloc.
//
// Allocates chunks in multiples of four bytes, with a four-byte header
// for each chunk. The overhead of each chunk is kept low by keeping pointers
// as two-byte offsets within the heap, rather than (4- or 8-byte) pointers.

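// As a concrete illustration (assuming the common case where unsigned short is
// two bytes, so sizeof(heap_node) == 4): a request for 10 bytes is rounded up
// to 3 heap_node units of payload plus 1 unit of header, 16 bytes in total,
// and the caller receives a pointer just past the 4-byte header.
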
namespace {

// When POSIX threads are not available, make the mutex operations a nop
#ifndef _LIBCXXABI_HAS_NO_THREADS
static _LIBCPP_CONSTINIT std::__libcpp_mutex_t heap_mutex = _LIBCPP_MUTEX_INITIALIZER;
#else
static _LIBCPP_CONSTINIT void* heap_mutex = 0;
#endif

class mutexor {
public:
#ifndef _LIBCXXABI_HAS_NO_THREADS
  mutexor(std::__libcpp_mutex_t* m) : mtx_(m) {
    std::__libcpp_mutex_lock(mtx_);
  }
  ~mutexor() { std::__libcpp_mutex_unlock(mtx_); }
#else
  mutexor(void*) {}
  ~mutexor() {}
#endif
private:
  mutexor(const mutexor& rhs);
  mutexor& operator=(const mutexor& rhs);
#ifndef _LIBCXXABI_HAS_NO_THREADS
  std::__libcpp_mutex_t* mtx_;
#endif
};

static const size_t HEAP_SIZE = 512;
char heap[HEAP_SIZE] __attribute__((aligned));

typedef unsigned short heap_offset;
typedef unsigned short heap_size;

struct heap_node {
  heap_offset next_node; // offset into heap
  heap_size len;         // size in units of "sizeof(heap_node)"
};

static const heap_node* list_end =
    (heap_node*)(&heap[HEAP_SIZE]); // one past the end of the heap
static heap_node* freelist = NULL;

heap_node* node_from_offset(const heap_offset offset) {
  return (heap_node*)(heap + (offset * sizeof(heap_node)));
}

heap_offset offset_from_node(const heap_node* ptr) {
  return static_cast<heap_offset>(
      static_cast<size_t>(reinterpret_cast<const char*>(ptr) - heap) /
      sizeof(heap_node));
}

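// Lazily invoked by the first allocation: the whole pool becomes one free
// chunk. With HEAP_SIZE == 512 and sizeof(heap_node) == 4 (the usual case),
// that is a single 128-unit chunk whose next_node offset refers to list_end.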
void init_heap() {
  freelist = (heap_node*)heap;
  freelist->next_node = offset_from_node(list_end);
  freelist->len = HEAP_SIZE / sizeof(heap_node);
}

// How big a chunk we allocate
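// For example, assuming sizeof(heap_node) == 4: alloc_size(1) == 2,
// alloc_size(4) == 2, and alloc_size(5) == 3. The extra "+ 1" unit is the
// heap_node header that precedes every allocation.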
size_t alloc_size(size_t len) {
  return (len + sizeof(heap_node) - 1) / sizeof(heap_node) + 1;
}

bool is_fallback_ptr(void* ptr) {
  return ptr >= heap && ptr < (heap + HEAP_SIZE);
}

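// Illustrative walk-through, assuming sizeof(heap_node) == 4: the first call
// of fallback_malloc(20) needs alloc_size(20) == 6 units. The initial 128-unit
// free chunk is shortened to 122 units and its 6-unit tail is returned: a
// 24-byte region whose first 4 bytes hold the header and whose remaining
// 20 bytes go to the caller.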
void* fallback_malloc(size_t len) {
  heap_node *p, *prev;
  const size_t nelems = alloc_size(len);
  mutexor mtx(&heap_mutex);

  if (NULL == freelist)
    init_heap();

  // Walk the free list, looking for a "big enough" chunk
  for (p = freelist, prev = 0; p && p != list_end;
       prev = p, p = node_from_offset(p->next_node)) {

    if (p->len > nelems) { // chunk is larger, shorten, and return the tail
      heap_node* q;

      p->len = static_cast<heap_size>(p->len - nelems);
      q = p + p->len;
      q->next_node = 0;
      q->len = static_cast<heap_size>(nelems);
      return (void*)(q + 1);
    }

    if (p->len == nelems) { // exact size match
      if (prev == 0)
        freelist = node_from_offset(p->next_node);
      else
        prev->next_node = p->next_node;
      p->next_node = 0;
      return (void*)(p + 1);
    }
  }
  return NULL; // couldn't find a spot big enough
}

// Return the start of the next block
heap_node* after(struct heap_node* p) { return p + p->len; }

void fallback_free(void* ptr) {
  struct heap_node* cp = ((struct heap_node*)ptr) - 1; // retrieve the chunk
  struct heap_node *p, *prev;

  mutexor mtx(&heap_mutex);

#ifdef DEBUG_FALLBACK_MALLOC
  std::printf("Freeing item at %d of size %d\n", offset_from_node(cp), cp->len);
#endif

  for (p = freelist, prev = 0; p && p != list_end;
       prev = p, p = node_from_offset(p->next_node)) {
#ifdef DEBUG_FALLBACK_MALLOC
    std::printf(" p=%d, cp=%d, after(p)=%d, after(cp)=%d\n",
                offset_from_node(p), offset_from_node(cp),
                offset_from_node(after(p)), offset_from_node(after(cp)));
#endif
    if (after(p) == cp) {
#ifdef DEBUG_FALLBACK_MALLOC
      std::printf(" Appending onto chunk at %d\n", offset_from_node(p));
#endif
      p->len = static_cast<heap_size>(
          p->len + cp->len); // make the free heap_node larger
      return;
    } else if (after(cp) == p) { // there's a free heap_node right after
#ifdef DEBUG_FALLBACK_MALLOC
      std::printf(" Appending free chunk at %d\n", offset_from_node(p));
#endif
      cp->len = static_cast<heap_size>(cp->len + p->len);
      if (prev == 0) {
        freelist = cp;
        cp->next_node = p->next_node;
      } else
        prev->next_node = offset_from_node(cp);
      return;
    }
  }
  // Nothing to merge with, add it to the start of the free list
#ifdef DEBUG_FALLBACK_MALLOC
  std::printf(" Making new free list entry %d\n", offset_from_node(cp));
#endif
  cp->next_node = offset_from_node(freelist);
  freelist = cp;
}

#ifdef INSTRUMENT_FALLBACK_MALLOC
size_t print_free_list() {
  struct heap_node *p, *prev;
  heap_size total_free = 0;
  if (NULL == freelist)
    init_heap();

  for (p = freelist, prev = 0; p && p != list_end;
       prev = p, p = node_from_offset(p->next_node)) {
    std::printf("%sOffset: %d\tsize: %d Next: %d\n",
                (prev == 0 ? "" : " "), offset_from_node(p), p->len, p->next_node);
    total_free += p->len;
  }
  std::printf("Total Free space: %d\n", total_free);
  return total_free;
}
#endif
} // end unnamed namespace

namespace __cxxabiv1 {

struct __attribute__((aligned)) __aligned_type {};

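// Try the normal allocation path first (library aligned allocation where
// available, plain ::malloc otherwise); only if that fails do we fall back
// to the emergency pool above.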
void* __aligned_malloc_with_fallback(size_t size) {
#if defined(_WIN32)
  if (void* dest = std::__libcpp_aligned_alloc(alignof(__aligned_type), size))
    return dest;
#elif defined(_LIBCPP_HAS_NO_LIBRARY_ALIGNED_ALLOCATION)
  if (void* dest = ::malloc(size))
    return dest;
#else
  if (size == 0)
    size = 1;
  if (void* dest = std::__libcpp_aligned_alloc(__alignof(__aligned_type), size))
    return dest;
#endif
  return fallback_malloc(size);
}

void* __calloc_with_fallback(size_t count, size_t size) {
  void* ptr = ::calloc(count, size);
  if (NULL != ptr)
    return ptr;
  // if calloc fails, fall back to emergency stash
  ptr = fallback_malloc(size * count);
  if (NULL != ptr)
    ::memset(ptr, 0, size * count);
  return ptr;
}

void __aligned_free_with_fallback(void* ptr) {
  if (is_fallback_ptr(ptr))
    fallback_free(ptr);
  else {
#if defined(_LIBCPP_HAS_NO_LIBRARY_ALIGNED_ALLOCATION)
    ::free(ptr);
#else
    std::__libcpp_aligned_free(ptr);
#endif
  }
}

void __free_with_fallback(void* ptr) {
  if (is_fallback_ptr(ptr))
    fallback_free(ptr);
  else
    ::free(ptr);
}

} // namespace __cxxabiv1