// allocator.hpp — brk-style kernel memory allocators
#pragma once

#include <concepts>
#include <new>
#include <type_traits>
#include <utility>

#include <assert.h>
#include <stddef.h>
#include <stdint.h>

#include <types/cplusplus.hpp>
#include <types/types.h>
  9. namespace types {
  10. namespace __allocator {
  11. class brk_memory_allocator {
  12. public:
  13. using byte = uint8_t;
  14. using size_type = size_t;
  15. struct mem_blk_flags {
  16. uint8_t is_free;
  17. uint8_t has_next;
  18. uint8_t _unused2;
  19. uint8_t _unused3;
  20. };
  21. struct mem_blk {
  22. size_t size;
  23. struct mem_blk_flags flags;
  24. // the first byte of the memory space
  25. // the minimal allocated space is 4 bytes
  26. byte data[];
  27. };
  28. private:
  29. byte* p_start;
  30. byte* p_break;
  31. byte* p_limit;
  32. brk_memory_allocator(void) = delete;
  33. brk_memory_allocator(const brk_memory_allocator&) = delete;
  34. brk_memory_allocator(brk_memory_allocator&&) = delete;
  35. constexpr int brk(byte* addr)
  36. {
  37. if (unlikely(addr >= p_limit))
  38. return GB_FAILED;
  39. p_break = addr;
  40. return GB_OK;
  41. }
  42. // sets errno
  43. inline byte* sbrk(size_type increment)
  44. {
  45. if (unlikely(brk(p_break + increment) != GB_OK))
  46. return nullptr;
  47. else
  48. return p_break;
  49. }
  50. inline mem_blk* _find_next_mem_blk(mem_blk* blk, size_type blk_size)
  51. {
  52. byte* p = (byte*)blk;
  53. p += sizeof(mem_blk);
  54. p += blk_size;
  55. return (mem_blk*)p;
  56. }
  57. // sets errno
  58. // @param start_pos position where to start finding
  59. // @param size the size of the block we're looking for
  60. // @return found block if suitable block exists, if not, the last block
  61. inline mem_blk* find_blk(mem_blk* start_pos, size_type size)
  62. {
  63. while (1) {
  64. if (start_pos->flags.is_free && start_pos->size >= size) {
  65. return start_pos;
  66. } else {
  67. if (unlikely(!start_pos->flags.has_next))
  68. return start_pos;
  69. start_pos = _find_next_mem_blk(start_pos, start_pos->size);
  70. }
  71. }
  72. }
  73. inline mem_blk* allocate_new_block(mem_blk* blk_before, size_type size)
  74. {
  75. auto ret = sbrk(sizeof(mem_blk) + size);
  76. if (!ret)
  77. return nullptr;
  78. mem_blk* blk = _find_next_mem_blk(blk_before, blk_before->size);
  79. blk_before->flags.has_next = 1;
  80. blk->flags.has_next = 0;
  81. blk->flags.is_free = 1;
  82. blk->size = size;
  83. return blk;
  84. }
  85. inline void split_block(mem_blk* blk, size_type this_size)
  86. {
  87. // block is too small to get split
  88. if (blk->size < sizeof(mem_blk) + this_size) {
  89. return;
  90. }
  91. mem_blk* blk_next = _find_next_mem_blk(blk, this_size);
  92. blk_next->size = blk->size
  93. - this_size
  94. - sizeof(mem_blk);
  95. blk_next->flags.has_next = blk->flags.has_next;
  96. blk_next->flags.is_free = 1;
  97. blk->flags.has_next = 1;
  98. blk->size = this_size;
  99. }
  100. public:
  101. inline brk_memory_allocator(byte* start, size_type limit)
  102. : p_start(start)
  103. , p_limit(p_start + limit)
  104. {
  105. brk(p_start);
  106. mem_blk* p_blk = (mem_blk*)sbrk(0);
  107. p_blk->size = 8;
  108. p_blk->flags.has_next = 0;
  109. p_blk->flags.is_free = 1;
  110. }
  111. // sets errno
  112. inline void* alloc(size_type size)
  113. {
  114. struct mem_blk* block_allocated;
  115. if (size < 8)
  116. size = 8;
  117. block_allocated = find_blk((mem_blk*)p_start, size);
  118. if (!block_allocated->flags.has_next
  119. && (!block_allocated->flags.is_free || block_allocated->size < size)) {
  120. // 'block_allocated' in the argument list is the pointer
  121. // pointing to the last block
  122. block_allocated = allocate_new_block(block_allocated, size);
  123. if (!block_allocated)
  124. return nullptr;
  125. } else {
  126. split_block(block_allocated, size);
  127. }
  128. block_allocated->flags.is_free = 0;
  129. return block_allocated->data;
  130. }
  131. inline void free(void* ptr)
  132. {
  133. mem_blk* blk = (mem_blk*)((byte*)ptr - (sizeof(mem_blk_flags) + sizeof(size_t)));
  134. blk->flags.is_free = 1;
  135. // TODO: fusion free blocks nearby
  136. }
  137. };
  138. }; // namespace __allocator
  139. template <typename T>
  140. concept Allocator = requires(size_t size, typename T::value_type* ptr)
  141. {
  142. typename T::value_type;
  143. {
  144. T::allocate_memory(size)
  145. };
  146. {
  147. T::deallocate_memory(ptr)
  148. };
  149. std::is_same_v<typename T::value_type*, decltype(T::allocate_memory(size))>;
  150. std::is_same_v<void, decltype(T::deallocate_memory(ptr))>;
  151. };
// forward declaration; the definition follows once the identity-mapped
// allocator it bootstraps with has been declared
template <Allocator T>
class allocator_traits;
namespace __allocator {
// statically-reserved 1 MiB buffer backing the early (identity-mapped) heap
inline char __ident_heap[0x100000];
// allocator used by kernel_ident_allocator, serving from __ident_heap
inline __allocator::brk_memory_allocator
    m_alloc { (uint8_t*)__ident_heap, sizeof(__ident_heap) };
} // namespace __allocator
  159. template <typename T>
  160. class kernel_ident_allocator {
  161. public:
  162. using value_type = T;
  163. static constexpr value_type* allocate_memory(size_t count)
  164. {
  165. return static_cast<value_type*>(__allocator::m_alloc.alloc(count));
  166. }
  167. static constexpr void deallocate_memory(value_type* ptr)
  168. {
  169. __allocator::m_alloc.free(ptr);
  170. }
  171. };
  172. template <template <typename _T> class Allocator, typename T, typename... Args>
  173. constexpr T* _new(Args&&... args)
  174. {
  175. return allocator_traits<Allocator<T>>::allocate_and_construct(std::forward<Args>(args)...);
  176. }
// Like _new, but takes an unused (typically null) T* first argument so
// that T can be deduced from a pointer variable at the call site, e.g.
//   m_palloc = pnew<kernel_ident_allocator>(m_palloc, start, sz);
template <template <typename _T> class Allocator, typename T, typename... Args>
constexpr T* pnew(T* = nullptr, Args&&... args)
{
    return _new<Allocator, T, Args...>(std::forward<Args>(args)...);
}
  182. template <template <typename _T> class Allocator, typename T>
  183. constexpr void pdelete(T* ptr)
  184. {
  185. allocator_traits<Allocator<T>>::deconstruct_and_deallocate(ptr);
  186. }
  187. template <Allocator _allocator>
  188. class allocator_traits {
  189. public:
  190. using value_type = typename _allocator::value_type;
  191. static constexpr value_type* allocate(size_t count)
  192. {
  193. if (count == 0)
  194. return nullptr;
  195. return _allocator::allocate_memory(sizeof(value_type) * count);
  196. }
  197. template <typename... Args>
  198. static constexpr value_type* construct(value_type* ptr, Args&&... args)
  199. {
  200. new (ptr) value_type(std::forward<Args>(args)...);
  201. return ptr;
  202. }
  203. template <typename... Args>
  204. static constexpr value_type* allocate_and_construct(Args&&... args)
  205. {
  206. auto* ptr = allocate(1);
  207. construct(ptr, std::forward<Args>(args)...);
  208. return ptr;
  209. }
  210. static constexpr void deconstruct(value_type* ptr)
  211. {
  212. if (!ptr)
  213. return;
  214. ptr->~value_type();
  215. }
  216. static constexpr void deallocate(value_type* ptr)
  217. {
  218. if (!ptr)
  219. return;
  220. _allocator::deallocate_memory(ptr);
  221. }
  222. static constexpr void deconstruct_and_deallocate(value_type* ptr)
  223. {
  224. if (!ptr)
  225. return;
  226. deconstruct(ptr);
  227. deallocate(ptr);
  228. }
  229. };
namespace __allocator {
// main kernel heap allocator; nullptr until init_kernel_heap() runs
inline __allocator::brk_memory_allocator* m_palloc;
// Create the kernel heap allocator managing [start, start + sz).
// The allocator object itself is carved out of the identity-mapped
// heap; m_palloc is passed to pnew only so T can be deduced from it.
inline void init_kernel_heap(void* start, size_t sz)
{
    m_palloc = pnew<kernel_ident_allocator>(m_palloc, (uint8_t*)start, sz);
}
} // namespace __allocator
  237. template <typename T>
  238. class kernel_allocator {
  239. public:
  240. using value_type = T;
  241. static constexpr value_type* allocate_memory(size_t count)
  242. {
  243. return static_cast<value_type*>(__allocator::m_palloc->alloc(count));
  244. }
  245. static constexpr void deallocate_memory(value_type* ptr)
  246. {
  247. __allocator::m_palloc->free(ptr);
  248. }
  249. };
// Adapts the project's static Allocator classes to the standard
// allocator interface so they can plug into std-style containers.
template <typename T, template <typename> typename Allocator>
struct allocator_adapter {
    using value_type = typename Allocator<T>::value_type;
    // let containers move this (stateless) allocator on move-assignment
    using propagate_on_container_move_assignment = std::true_type;
    constexpr allocator_adapter() = default;
    // converting constructor required by the allocator model (containers
    // rebind to internal node types); all instances are interchangeable
    template <template <typename> typename UAlloc, typename U>
    constexpr allocator_adapter(const allocator_adapter<U, UAlloc>&)
        noexcept {}
    // allocate storage for n objects of T (traits scales by sizeof(T))
    constexpr T* allocate(std::size_t n)
    { return types::allocator_traits<Allocator<T>>::allocate(n); }
    // release storage previously obtained from allocate()
    constexpr void deallocate(T* ptr, std::size_t)
    { return types::allocator_traits<Allocator<T>>::deallocate(ptr); }
    template <typename U>
    struct rebind { using other = allocator_adapter<U, Allocator>; };
};
  265. } // namespace types