allocator.hpp

#pragma once

#include <utility>

#include <assert.h>
#include <stdint.h>

#include <types/cplusplus.hpp>
#include <types/types.h>

constexpr void* operator new(size_t, void* ptr)
{
    return ptr;
}
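// Note: this header defines its own placement operator new, presumably because
// the code targets a freestanding kernel environment where <new> is not
// available. It simply returns the supplied pointer so that
// allocator_traits::construct below can construct objects in storage that has
// already been allocated.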
namespace types {

namespace __allocator {

// A simple first-fit heap allocator that manages a contiguous memory region
// through a movable "break" pointer, in the style of brk()/sbrk().
class brk_memory_allocator {
public:
    using byte = uint8_t;
    using size_type = size_t;

    struct mem_blk_flags {
        uint8_t is_free;
        uint8_t has_next;
        uint8_t _unused2;
        uint8_t _unused3;
    };

    struct mem_blk {
        size_t size;
        struct mem_blk_flags flags;
        // the first bytes of the memory space;
        // the minimal allocated space is 4 bytes
        uint8_t data[4];
    };
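
    // Layout note (as read from the code, not a documented contract): every
    // allocation is preceded by a mem_blk header, and data[] overlaps the
    // first 4 bytes of the payload. A block whose payload is `size` bytes
    // therefore occupies sizeof(mem_blk) - 4 + size bytes of heap, which is
    // why the pointer arithmetic below repeatedly adds sizeof(mem_blk) and
    // subtracts 4 * sizeof(byte).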

private:
    byte* p_start;
    byte* p_break;
    byte* p_limit;

    brk_memory_allocator(void) = delete;
    brk_memory_allocator(const brk_memory_allocator&) = delete;
    brk_memory_allocator(brk_memory_allocator&&) = delete;

    // moves the break to addr; fails if addr would reach p_limit
    constexpr int brk(byte* addr)
    {
        if (unlikely(addr >= p_limit))
            return GB_FAILED;
        p_break = addr;
        return GB_OK;
    }

    // sets errno
    // grows the heap by `increment` bytes; unlike POSIX sbrk(), returns the
    // new break on success and nullptr on failure
    inline byte* sbrk(size_type increment)
    {
        if (unlikely(brk(p_break + increment) != GB_OK))
            return nullptr;
        else
            return p_break;
    }

    // returns the header of the block following `blk`, whose payload size is
    // `blk_size`; the 4 bytes of data[] are already counted in sizeof(mem_blk)
    inline mem_blk* _find_next_mem_blk(mem_blk* blk, size_type blk_size)
    {
        byte* p = (byte*)blk;
        p += sizeof(mem_blk);
        p += blk_size;
        p -= (4 * sizeof(byte));
        return (mem_blk*)p;
    }

    // sets errno
    // @param start_pos position where to start searching
    // @param size the size of the block we're looking for
    // @return the found block if a suitable one exists; otherwise the last block
    inline mem_blk* find_blk(mem_blk* start_pos, size_type size)
    {
        while (1) {
            if (start_pos->flags.is_free && start_pos->size >= size) {
                return start_pos;
            } else {
                if (unlikely(!start_pos->flags.has_next))
                    return start_pos;
                start_pos = _find_next_mem_blk(start_pos, start_pos->size);
            }
        }
    }

    // appends a new block with a payload of `size` bytes after `blk_before`,
    // growing the heap via sbrk(); returns nullptr if the limit is reached
    inline mem_blk* allocate_new_block(mem_blk* blk_before, size_type size)
    {
        auto ret = sbrk(sizeof(mem_blk) + size - 4 * sizeof(byte));
        if (!ret)
            return nullptr;

        mem_blk* blk = _find_next_mem_blk(blk_before, blk_before->size);

        blk_before->flags.has_next = 1;

        blk->flags.has_next = 0;
        blk->flags.is_free = 1;
        blk->size = size;

        return blk;
    }

    // shrinks `blk` to a payload of `this_size` bytes and turns the remainder
    // into a new free block behind it
    inline void split_block(mem_blk* blk, size_type this_size)
    {
        // block is too small to be split
        if (blk->size < sizeof(mem_blk) + this_size) {
            return;
        }

        mem_blk* blk_next = _find_next_mem_blk(blk, this_size);

        blk_next->size = blk->size
            - this_size
            - sizeof(mem_blk)
            + 4 * sizeof(byte);

        blk_next->flags.has_next = blk->flags.has_next;
        blk_next->flags.is_free = 1;

        blk->flags.has_next = 1;
        blk->size = this_size;
    }
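
    // Worked example (a sketch, assuming a 64-bit build where
    // sizeof(size_t) == 8 and thus sizeof(mem_blk) == 16): splitting a free
    // block with size == 64 for a request of this_size == 16 passes the check
    // (64 >= 16 + 16) and leaves a new free block with
    // size == 64 - 16 - 16 + 4 == 36 payload bytes.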

public:
    inline brk_memory_allocator(byte* start, size_type limit)
        : p_start(start)
        , p_limit(p_start + limit)
    {
        brk(p_start);
        mem_blk* p_blk = (mem_blk*)sbrk(0);
        p_blk->size = 4;
        p_blk->flags.has_next = 0;
        p_blk->flags.is_free = 1;
    }
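
    // Note (inferred from the code above): the constructor writes the initial
    // header at p_start while the break is still at p_start, i.e. just past
    // the current break. This relies on the whole [start, start + limit)
    // range handed to the allocator being writable up front.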

    // sets errno
    inline void* alloc(size_type size)
    {
        struct mem_blk* block_allocated;

        block_allocated = find_blk((mem_blk*)p_start, size);

        if (!block_allocated->flags.has_next
            && (!block_allocated->flags.is_free || block_allocated->size < size)) {
            // find_blk() returned the last block and it is not usable,
            // so append a new block at the end of the heap
            block_allocated = allocate_new_block(block_allocated, size);
            if (!block_allocated)
                return nullptr;
        } else {
            split_block(block_allocated, size);
        }

        block_allocated->flags.is_free = 0;
        return block_allocated->data;
    }

    inline void free(void* ptr)
    {
        // step back from the payload pointer to the enclosing block header
        mem_blk* blk = (mem_blk*)((byte*)ptr - (sizeof(mem_blk_flags) + sizeof(size_t)));
        blk->flags.is_free = 1;
        // TODO: coalesce adjacent free blocks
    }
};

} // namespace __allocator
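
// Usage sketch (illustrative only; the buffer name and sizes below are made
// up, not part of this header):
//
//     static uint8_t buf[4096];
//     types::__allocator::brk_memory_allocator alloc { buf, sizeof(buf) };
//
//     void* p = alloc.alloc(64);   // nullptr if the 4 KiB limit is exceeded
//     alloc.free(p);               // marks the block free; no coalescing yet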

// An Allocator exposes a value_type and static allocate_memory() /
// deallocate_memory() functions; note that allocate_memory() receives a size
// in bytes (see allocator_traits::allocate below).
template <typename T>
concept Allocator = requires(size_t size, typename T::value_type* ptr)
{
    typename T::value_type;
    { T::allocate_memory(size) } -> same_as<typename T::value_type*>;
    { T::deallocate_memory(ptr) } -> same_as<void>;
};

template <Allocator T>
class allocator_traits;

namespace __allocator {

// 1 MiB statically-allocated heap and the allocator that manages it; this is
// what kernel_ident_allocator below draws from
inline char __ident_heap[0x100000];
inline __allocator::brk_memory_allocator
    m_alloc { (uint8_t*)__ident_heap, sizeof(__ident_heap) };

} // namespace __allocator

template <typename T>
class kernel_ident_allocator {
public:
    using value_type = T;

    // note: `count` is a size in bytes, as passed by allocator_traits::allocate
    static constexpr value_type* allocate_memory(size_t count)
    {
        return static_cast<value_type*>(__allocator::m_alloc.alloc(count));
    }

    static constexpr void deallocate_memory(value_type* ptr)
    {
        __allocator::m_alloc.free(ptr);
    }
};

template <template <typename _T> class Allocator, typename T, typename... Args>
constexpr T* _new(Args&&... args)
{
    return allocator_traits<Allocator<T>>::allocate_and_construct(std::forward<Args>(args)...);
}

// the first parameter is only used to deduce T and is otherwise ignored
template <template <typename _T> class Allocator, typename T, typename... Args>
constexpr T* pnew(T* = nullptr, Args&&... args)
{
    return _new<Allocator, T, Args...>(std::forward<Args>(args)...);
}

template <template <typename _T> class Allocator, typename T>
constexpr void pdelete(T* ptr)
{
    allocator_traits<Allocator<T>>::deconstruct_and_deallocate(ptr);
}
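
// Usage sketch (illustrative; `foo` and its constructor argument are made-up
// names, not part of this header). It mirrors how init_kernel_heap() below
// calls pnew(), with the pointer-typed first argument acting purely as a type
// deduction hint:
//
//     struct foo { int x; foo(int x) : x(x) {} };
//
//     foo* p = nullptr;
//     p = types::pnew<types::kernel_ident_allocator>(p, 42);
//     types::pdelete<types::kernel_ident_allocator>(p);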

template <Allocator _allocator>
class allocator_traits {
public:
    using value_type = typename _allocator::value_type;

    static constexpr value_type* allocate(size_t count)
    {
        if (count == 0)
            return nullptr;
        return _allocator::allocate_memory(sizeof(value_type) * count);
    }

    template <typename... Args>
    static constexpr value_type* construct(value_type* ptr, Args&&... args)
    {
        new (ptr) value_type(std::forward<Args>(args)...);
        return ptr;
    }

    template <typename... Args>
    static constexpr value_type* allocate_and_construct(Args&&... args)
    {
        auto* ptr = allocate(1);
        // guard against allocation failure before constructing
        if (!ptr)
            return nullptr;
        construct(ptr, std::forward<Args>(args)...);
        return ptr;
    }

    static constexpr void deconstruct(value_type* ptr)
    {
        if (!ptr)
            return;
        ptr->~value_type();
    }

    static constexpr void deallocate(value_type* ptr)
    {
        if (!ptr)
            return;
        _allocator::deallocate_memory(ptr);
    }

    static constexpr void deconstruct_and_deallocate(value_type* ptr)
    {
        if (!ptr)
            return;
        deconstruct(ptr);
        deallocate(ptr);
    }
};

namespace __allocator {

// allocator for the main kernel heap; the brk_memory_allocator object itself
// is allocated out of the static identity heap via kernel_ident_allocator
inline __allocator::brk_memory_allocator* m_palloc;

inline void init_kernel_heap(void* start, size_t sz)
{
    m_palloc = pnew<kernel_ident_allocator>(m_palloc, (uint8_t*)start, sz);
}

} // namespace __allocator
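
// Note (inferred from the declarations above): m_palloc has static storage
// duration and is therefore zero-initialized to nullptr, so kernel_allocator
// below must not be used until init_kernel_heap() has been called with the
// kernel heap's start address and size.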

template <typename T>
class kernel_allocator {
public:
    using value_type = T;

    static constexpr value_type* allocate_memory(size_t count)
    {
        return static_cast<value_type*>(__allocator::m_palloc->alloc(count));
    }

    static constexpr void deallocate_memory(value_type* ptr)
    {
        __allocator::m_palloc->free(ptr);
    }
};

} // namespace types