/*
 * malloc.c
 *
 * Copyright (C) 2017 Aleksandar Andrejevic <theflash@sdf.lonestar.org>
 *
 * This program is free software: you can redistribute it and/or modify
 * it under the terms of the GNU Affero General Public License as
 * published by the Free Software Foundation, either version 3 of the
 * License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */

#include <stdlib.h>
#include <string.h>
#include <monolithium.h>

#define ALLOCATED (1 << 0)

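/*
 * Every heap block is preceded by this header. Blocks tile the heap
 * contiguously: the next header always lives at
 * (header + sizeof(heap_header_t) + header->size).
 */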
typedef struct
{
    uint32_t magic;
    uint32_t flags;
    size_t size;
} heap_header_t;

__crt_heap_t *__crt_default_heap = NULL;

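/*
 * Walk the heap from its base and merge every run of adjacent free blocks
 * into a single free block. If next_offset pointed at a block that gets
 * absorbed, it is moved back to the surviving block so the next-fit scan
 * still starts at a valid header.
 */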
static void __crt_heap_coalesce(__crt_heap_t *heap)
{
    heap_header_t *ptr = (heap_header_t*)heap->base;
    heap_header_t *previous = NULL;

    while ((uintptr_t)ptr >= (uintptr_t)heap->base && (uintptr_t)ptr < (uintptr_t)heap->base + heap->size)
    {
        heap_header_t *next = (heap_header_t*)((uintptr_t)ptr + sizeof(heap_header_t) + ptr->size);

        if (previous && !(previous->flags & ALLOCATED) && !(ptr->flags & ALLOCATED))
        {
            if (((uintptr_t)ptr - (uintptr_t)heap->base) == heap->next_offset)
            {
                heap->next_offset = (uintptr_t)previous - (uintptr_t)heap->base;
            }

            previous->size += ptr->size + sizeof(heap_header_t);
        }
        else
        {
            previous = ptr;
        }

        ptr = next;
    }
}

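/*
 * Single entry point for every allocation operation: ptr == NULL allocates,
 * size == 0 frees, and any other combination resizes, growing in place when
 * the following block is free or falling back to allocate-copy-free.
 * alignment must be a nonzero power of two.
 */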
void *__crt_heap_realloc(__crt_heap_t *heap, void *ptr, size_t alignment, size_t size)
{
    if (!alignment || (alignment & (alignment - 1))) return NULL;

    heap->lock_mutex_proc(heap->mutex);

    if (!(heap->flags & __CRT_HEAP_FLAG_READY))
    {
        heap_header_t *header = (heap_header_t*)heap->base;
        header->magic = heap->magic;
        header->flags = 0;
        header->size = heap->size - sizeof(heap_header_t);

        heap->flags |= __CRT_HEAP_FLAG_READY;
    }

    if (!size)
    {
        if (ptr)
        {
            heap_header_t *header = (heap_header_t*)((uintptr_t)ptr - sizeof(heap_header_t));

            if (header->flags & ALLOCATED)
            {
                header->flags &= ~ALLOCATED;
                __crt_heap_coalesce(heap);
            }
            else
            {
                heap->problem(__CRT_HEAP_DOUBLE_FREE);
            }
        }

        heap->unlock_mutex_proc(heap->mutex);
        return NULL;
    }

    heap_header_t *source_block = NULL;

    if (ptr)
    {
        heap_header_t *header = (heap_header_t*)((uintptr_t)ptr - sizeof(heap_header_t));

        if (!(header->flags & ALLOCATED) || header->magic != heap->magic)
        {
            heap->problem(__CRT_HEAP_BAD_POINTER);
            heap->unlock_mutex_proc(heap->mutex);
            return NULL;
        }

        if (size > header->size)
        {
            heap_header_t *next = (heap_header_t*)((uintptr_t)ptr + header->size);

            // Grow in place only if this is not the last block and the next block is free and large enough.
            if ((uintptr_t)next < (uintptr_t)heap->base + heap->size
                && !(next->flags & ALLOCATED)
                && (header->size + next->size + sizeof(heap_header_t)) >= size)
            {
                if (((uintptr_t)next - (uintptr_t)heap->base) == heap->next_offset)
                {
                    heap->next_offset = (uintptr_t)header - (uintptr_t)heap->base;
                }

                size_t old_size = header->size;
                header->size += next->size + sizeof(heap_header_t);
                // Zero only the bytes that become visible beyond the old data.
                if (heap->flags & __CRT_HEAP_FLAG_ZEROFILL) memset(next, 0, size - old_size);
            }
            else
            {
                source_block = header;
            }
        }

        if (header->size > sizeof(heap_header_t) && size < (header->size - sizeof(heap_header_t)))
        {
            heap_header_t *new_block = (heap_header_t*)((uintptr_t)ptr + size);
            new_block->magic = heap->magic;
            new_block->flags = 0;
            new_block->size = header->size - size - sizeof(heap_header_t);
            header->size = size;
            __crt_heap_coalesce(heap);
        }

        if (!source_block)
        {
            heap->unlock_mutex_proc(heap->mutex);
            return ptr;
        }
    }

    heap_header_t *hole = NULL;

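    /*
     * Find a free block that can hold the request. With __CRT_HEAP_FLAG_BESTFIT
     * the whole heap is scanned for the smallest block that still fits after
     * alignment padding; otherwise a next-fit scan resumes from next_offset
     * and wraps around the heap at most once.
     */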
    if (heap->flags & __CRT_HEAP_FLAG_BESTFIT)
    {
        heap_header_t *current = (heap_header_t*)heap->base;
        size_t minimum_size;

        while ((uintptr_t)current < (uintptr_t)heap->base + heap->size)
        {
            if (current->magic != heap->magic)
            {
                heap->problem(__CRT_HEAP_CORRUPTED);
                heap->unlock_mutex_proc(heap->mutex);
                return NULL;
            }

            if (!(current->flags & ALLOCATED))
            {
                uintptr_t current_start = (uintptr_t)current + sizeof(heap_header_t);
                uintptr_t aligned_start = (current_start + alignment - 1) & ~(alignment - 1);
                size_t padding = aligned_start - current_start;

                if (current->size > padding)
                {
                    size_t adjusted_size = current->size - padding;

                    if (adjusted_size >= size && (!hole || adjusted_size < minimum_size))
                    {
                        hole = current;
                        minimum_size = adjusted_size;
                    }
                }
            }

            heap_header_t *next = (heap_header_t*)((uintptr_t)current + sizeof(heap_header_t) + current->size);

            if ((uintptr_t)next <= (uintptr_t)current)
            {
                heap->problem(__CRT_HEAP_CORRUPTED);
                heap->unlock_mutex_proc(heap->mutex);
                return NULL;
            }

            current = next;
        }
    }
    else
    {
        uintptr_t offset = heap->next_offset;
        int cycles = 0;

        do
        {
            heap_header_t *current = (heap_header_t*)((uintptr_t)heap->base + offset);

            if (current->magic != heap->magic)
            {
                heap->problem(__CRT_HEAP_CORRUPTED);
                heap->unlock_mutex_proc(heap->mutex);
                return NULL;
            }

            if (!(current->flags & ALLOCATED))
            {
                uintptr_t current_start = (uintptr_t)current + sizeof(heap_header_t);
                uintptr_t aligned_start = (current_start + alignment - 1) & ~(alignment - 1);
                size_t padding = aligned_start - current_start;

                if (current->size >= padding && (current->size - padding) >= size)
                {
                    hole = current;
                    break;
                }
            }

            offset += sizeof(heap_header_t) + current->size;

            // The last block ends exactly at heap->size, so wrap on >= rather than >.
            if (offset >= heap->size)
            {
                offset = 0;
                if (++cycles > 1)
                {
                    heap->problem(__CRT_HEAP_CORRUPTED);
                    heap->unlock_mutex_proc(heap->mutex);
                    return NULL;
                }
            }
        }
        while (offset != heap->next_offset);

        heap->next_offset = offset;
    }

    if (!hole)
    {
        heap->problem(__CRT_HEAP_OUT_OF_MEMORY);
        heap->unlock_mutex_proc(heap->mutex);
        return NULL;
    }

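    /*
     * Honor the requested alignment. A gap in front of the aligned address
     * that is larger than a header becomes a free block of its own; a smaller
     * nonzero gap is donated to the previous block and the hole's header is
     * slid forward to sit just before the aligned address.
     */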
    int coalesce = 0;
    uintptr_t start_address = (uintptr_t)hole + sizeof(heap_header_t);
    uintptr_t aligned_start = (start_address + alignment - 1) & ~(alignment - 1);
    size_t padding = aligned_start - start_address;

    if (padding > sizeof(heap_header_t))
    {
        heap_header_t *new_block = (heap_header_t*)(aligned_start - sizeof(heap_header_t));
        new_block->magic = heap->magic;
        new_block->flags = 0;
        new_block->size = hole->size - padding;
        // The leading remainder ends where the new header begins, so its size is the padding minus one header.
        hole->size = padding - sizeof(heap_header_t);

        hole = new_block;
        coalesce = 1;
    }
    else if (padding)
    {
        heap_header_t *previous = (heap_header_t*)heap->base;

        while ((uintptr_t)previous < (uintptr_t)heap->base + heap->size)
        {
            heap_header_t *next = (heap_header_t*)((uintptr_t)previous + sizeof(heap_header_t) + previous->size);
            if (next == hole) break;

            if ((uintptr_t)next <= (uintptr_t)previous)
            {
                heap->problem(__CRT_HEAP_CORRUPTED);
                heap->unlock_mutex_proc(heap->mutex);
                return NULL;
            }

            previous = next;
        }

        if ((uintptr_t)previous < (uintptr_t)heap->base
            || (uintptr_t)previous >= (uintptr_t)heap->base + heap->size
            || previous->magic != heap->magic)
        {
            heap->problem(__CRT_HEAP_CORRUPTED);
            heap->unlock_mutex_proc(heap->mutex);
            return NULL;
        }

        previous->size += padding;

        heap_header_t *new_block = (heap_header_t*)(aligned_start - sizeof(heap_header_t));
        memmove(new_block, hole, sizeof(heap_header_t));
        if (heap->next_offset == (uintptr_t)hole - (uintptr_t)heap->base) heap->next_offset += padding;

        hole = new_block;
        hole->size -= padding;
    }

    hole->flags |= ALLOCATED;

    if (hole->size > sizeof(heap_header_t) && size < (hole->size - sizeof(heap_header_t)))
    {
        heap_header_t *new_block = (heap_header_t*)((uintptr_t)hole + size + sizeof(heap_header_t));
        new_block->magic = heap->magic;
        new_block->flags = 0;
        new_block->size = hole->size - size - sizeof(heap_header_t);
        hole->size = size;
        coalesce = 1;
    }

    void *destination = (void*)((uintptr_t)hole + sizeof(heap_header_t));
    if (heap->flags & __CRT_HEAP_FLAG_ZEROFILL) memset(destination, 0, size);

    if (source_block)
    {
        void *source = (void*)((uintptr_t)source_block + sizeof(heap_header_t));
        memcpy(destination, source, source_block->size);
        source_block->flags &= ~ALLOCATED;
        coalesce = 1;
    }

    if (coalesce) __crt_heap_coalesce(heap);
    heap->unlock_mutex_proc(heap->mutex);
    return destination;
}

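/*
 * Standard C allocation interface, implemented as thin wrappers around
 * __crt_heap_realloc on the default heap. malloc, realloc and free use
 * sizeof(long) as the default alignment.
 */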
void *aligned_alloc(size_t alignment, size_t size)
{
    return __crt_heap_realloc(__crt_default_heap, NULL, alignment, size);
}

void *realloc(void *ptr, size_t size)
{
    return __crt_heap_realloc(__crt_default_heap, ptr, sizeof(long), size);
}

void *malloc(size_t size)
{
    return __crt_heap_realloc(__crt_default_heap, NULL, sizeof(long), size);
}

void free(void *ptr)
{
    __crt_heap_realloc(__crt_default_heap, ptr, sizeof(long), 0);
}

void *calloc(size_t nmemb, size_t size)
{
    // Refuse requests whose total byte count would overflow size_t.
    if (size && nmemb > (size_t)-1 / size) return NULL;

    void *ptr = malloc(nmemb * size);
    if (!ptr) return NULL;

    memset(ptr, 0, nmemb * size);
    return ptr;
}

static void __crt_heap_lock(void *mutex)
{
    syscall_wait_mutex((handle_t)mutex, NO_TIMEOUT);
}

static void __crt_heap_unlock(void *mutex)
{
    syscall_release_mutex((handle_t)mutex);
}

static void __crt_heap_problem(int problem)
{
    // TODO
}

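/*
 * Set up the default heap: create its mutex, reserve the backing memory
 * from the kernel and publish the heap through __crt_default_heap.
 */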
int __crt_initialize_heap(void)
{
    static __crt_heap_t heap;

    heap.magic             = 0x70616548;
    heap.base              = NULL;
    heap.size              = 0x10000000;
    heap.flags             = 0;
    heap.next_offset       = 0;
    heap.lock_mutex_proc   = __crt_heap_lock;
    heap.unlock_mutex_proc = __crt_heap_unlock;
    heap.problem           = __crt_heap_problem;

    sysret_t status = syscall_create_mutex(NULL, TRUE, (handle_t*)&heap.mutex);
    if (status != ERR_SUCCESS) return -1;

    status = syscall_alloc_memory(INVALID_HANDLE, &heap.base, heap.size, MEMORY_BLOCK_ACCESSIBLE | MEMORY_BLOCK_WRITABLE);
    if (status != ERR_SUCCESS) return -1;

    __crt_default_heap = &heap;
    return 0;
}