/*
 * Copyright (c) 2020 Andrew Kelley
 *
 * This file is part of zig, which is MIT licensed.
 * See http://opensource.org/licenses/MIT
 */

#include <new>
#include <string.h>

#include "heap.hpp"

namespace heap {

extern mem::Allocator &bootstrap_allocator;

//
// BootstrapAllocator implementation is identical to CAllocator minus
// the profiling functionality. Splitting this off into a base interface
// doesn't seem worthwhile.
//

void BootstrapAllocator::init(const char *name) {}
void BootstrapAllocator::deinit() {}

void *BootstrapAllocator::internal_allocate(const mem::TypeInfo &info, size_t count) {
    return mem::os::calloc(count, info.size);
}

void *BootstrapAllocator::internal_allocate_nonzero(const mem::TypeInfo &info, size_t count) {
    return mem::os::malloc(count * info.size);
}

void *BootstrapAllocator::internal_reallocate(const mem::TypeInfo &info, void *old_ptr, size_t old_count, size_t new_count) {
    auto new_ptr = this->internal_reallocate_nonzero(info, old_ptr, old_count, new_count);
    if (new_count > old_count)
        memset(reinterpret_cast<uint8_t *>(new_ptr) + (old_count * info.size), 0, (new_count - old_count) * info.size);
    return new_ptr;
}

void *BootstrapAllocator::internal_reallocate_nonzero(const mem::TypeInfo &info, void *old_ptr, size_t old_count, size_t new_count) {
    return mem::os::realloc(old_ptr, new_count * info.size);
}

void BootstrapAllocator::internal_deallocate(const mem::TypeInfo &info, void *ptr, size_t count) {
    mem::os::free(ptr);
}

void CAllocator::init(const char *name) { }

void CAllocator::deinit() { }

CAllocator *CAllocator::construct(mem::Allocator *allocator, const char *name) {
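    // get raw storage from the given allocator, then run the constructor
    // in place via placement new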
    auto p = new(allocator->create<CAllocator>()) CAllocator();
    p->init(name);
    return p;
}

void CAllocator::destruct(mem::Allocator *allocator) {
    this->deinit();
    allocator->destroy(this);
}

void *CAllocator::internal_allocate(const mem::TypeInfo &info, size_t count) {
    return mem::os::calloc(count, info.size);
}

void *CAllocator::internal_allocate_nonzero(const mem::TypeInfo &info, size_t count) {
    return mem::os::malloc(count * info.size);
}

void *CAllocator::internal_reallocate(const mem::TypeInfo &info, void *old_ptr, size_t old_count, size_t new_count) {
    auto new_ptr = this->internal_reallocate_nonzero(info, old_ptr, old_count, new_count);
    if (new_count > old_count)
        memset(reinterpret_cast<uint8_t *>(new_ptr) + (old_count * info.size), 0, (new_count - old_count) * info.size);
    return new_ptr;
}

void *CAllocator::internal_reallocate_nonzero(const mem::TypeInfo &info, void *old_ptr, size_t old_count, size_t new_count) {
    return mem::os::realloc(old_ptr, new_count * info.size);
}

void CAllocator::internal_deallocate(const mem::TypeInfo &info, void *ptr, size_t count) {
    mem::os::free(ptr);
}

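//
// ArenaAllocator: bump allocation out of fixed-size segments, with large
// requests (>= Segment::object_threshold) punted to the backing allocator.
// Individual frees are no-ops; deinit() walks the segment and object
// tracks and releases everything in bulk.
//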
struct ArenaAllocator::Impl {
    Allocator *backing;

    // regular allocations bump through a segment of static size
    struct Segment {
        static constexpr size_t size = 65536;
        static constexpr size_t object_threshold = 4096;

        uint8_t data[size];
    };

    // active segment
    Segment *segment;
    size_t segment_offset;

    // keep track of segments
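    // (each track is sized so that, prev pointer included, it fits in a
    // 4 KiB page; the static_assert below checks this)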
    struct SegmentTrack {
        static constexpr size_t size = (4096 - sizeof(SegmentTrack *)) / sizeof(Segment *);

        // null if first
        SegmentTrack *prev;
        Segment *segments[size];
    };
    static_assert(sizeof(SegmentTrack) <= 4096, "unwanted struct padding");

    // active segment track
    SegmentTrack *segment_track;
    size_t segment_track_remain;

    // individual allocations punted to backing allocator
    struct Object {
        uint8_t *ptr;
        size_t len;
    };

    // keep track of objects
    struct ObjectTrack {
        static constexpr size_t size = (4096 - sizeof(ObjectTrack *)) / sizeof(Object);

        // null if first
        ObjectTrack *prev;
        Object objects[size];
    };
    static_assert(sizeof(ObjectTrack) <= 4096, "unwanted struct padding");

    // active object track
    ObjectTrack *object_track;
    size_t object_track_remain;

    ATTRIBUTE_RETURNS_NOALIAS inline void *allocate(const mem::TypeInfo& info, size_t count);
    inline void *reallocate(const mem::TypeInfo& info, void *old_ptr, size_t old_count, size_t new_count);

    inline void new_segment();
    inline void track_segment();
    inline void track_object(Object object);
};

void *ArenaAllocator::Impl::allocate(const mem::TypeInfo& info, size_t count) {
#ifndef NDEBUG
    // make behavior when size == 0 portable
    if (info.size == 0 || count == 0)
        return nullptr;
#endif
    const size_t nbytes = info.size * count;
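    // round the bump offset up to the requested alignment; this bit trick
    // assumes info.alignment is a power of two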
    this->segment_offset = (this->segment_offset + (info.alignment - 1)) & ~(info.alignment - 1);
    if (nbytes >= Segment::object_threshold) {
        auto ptr = this->backing->allocate<uint8_t>(nbytes);
        this->track_object({ptr, nbytes});
        return ptr;
    }
    if (this->segment_offset + nbytes > Segment::size)
        this->new_segment();
    auto ptr = &this->segment->data[this->segment_offset];
    this->segment_offset += nbytes;
    return ptr;
}

void *ArenaAllocator::Impl::reallocate(const mem::TypeInfo& info, void *old_ptr, size_t old_count, size_t new_count) {
#ifndef NDEBUG
    // make behavior when size == 0 portable
    if (info.size == 0 && old_ptr == nullptr)
        return nullptr;
#endif
    const size_t new_nbytes = info.size * new_count;
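    // shrinking (or keeping the same size) is a no-op: the caller keeps
    // the old pointer and the arena reclaims nothing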
    if (new_nbytes <= info.size * old_count)
        return old_ptr;
    const size_t old_nbytes = info.size * old_count;
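    // growing always copies into a fresh allocation; as in allocate(), the
    // alignment rounding below assumes a power-of-two alignment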
    this->segment_offset = (this->segment_offset + (info.alignment - 1)) & ~(info.alignment - 1);
    if (new_nbytes >= Segment::object_threshold) {
        auto new_ptr = this->backing->allocate<uint8_t>(new_nbytes);
        this->track_object({new_ptr, new_nbytes});
        memcpy(new_ptr, old_ptr, old_nbytes);
        return new_ptr;
    }
    if (this->segment_offset + new_nbytes > Segment::size)
        this->new_segment();
    auto new_ptr = &this->segment->data[this->segment_offset];
    this->segment_offset += new_nbytes;
    memcpy(new_ptr, old_ptr, old_nbytes);
    return new_ptr;
}

void ArenaAllocator::Impl::new_segment() {
    this->segment = this->backing->create<Segment>();
    this->segment_offset = 0;
    this->track_segment();
}

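// tracks fill from the back: *_remain counts down from the capacity, so
// the occupied slots are indices [remain, size)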
void ArenaAllocator::Impl::track_segment() {
    assert(this->segment != nullptr);
    if (this->segment_track_remain < 1) {
        auto prev = this->segment_track;
        this->segment_track = this->backing->create<SegmentTrack>();
        this->segment_track->prev = prev;
        this->segment_track_remain = SegmentTrack::size;
    }
    this->segment_track_remain -= 1;
    this->segment_track->segments[this->segment_track_remain] = this->segment;
}

void ArenaAllocator::Impl::track_object(Object object) {
    if (this->object_track_remain < 1) {
        auto prev = this->object_track;
        this->object_track = this->backing->create<ObjectTrack>();
        this->object_track->prev = prev;
        this->object_track_remain = ObjectTrack::size;
    }
    this->object_track_remain -= 1;
    this->object_track->objects[this->object_track_remain] = object;
}

void ArenaAllocator::init(Allocator *backing, const char *name) {
    this->impl = bootstrap_allocator.create<Impl>();
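    // create<Impl>() returns zero-filled memory; deinit() depends on the
    // track pointers starting out null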
    {
        auto &r = *this->impl;
        r.backing = backing;
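        // start past the end of the (nonexistent) active segment so the
        // first allocation forces a fresh one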
        r.segment_offset = Impl::Segment::size;
    }
}

void ArenaAllocator::deinit() {
    auto &backing = *this->impl->backing;

    // segments
    if (this->impl->segment_track) {
        // the active track may be only partially filled; its occupied
        // slots begin at segment_track_remain
        auto prev = this->impl->segment_track->prev;
        {
            auto t = this->impl->segment_track;
            for (size_t i = this->impl->segment_track_remain; i < Impl::SegmentTrack::size; ++i)
                backing.destroy(t->segments[i]);
            backing.destroy(t);
        }

        // previous tracks are full
        for (auto t = prev; t != nullptr;) {
            for (size_t i = 0; i < Impl::SegmentTrack::size; ++i)
                backing.destroy(t->segments[i]);
            prev = t->prev;
            backing.destroy(t);
            t = prev;
        }
    }

    // objects
    if (this->impl->object_track) {
        // the active track may be only partially filled; its occupied
        // slots begin at object_track_remain
        auto prev = this->impl->object_track->prev;
        {
            auto t = this->impl->object_track;
            for (size_t i = this->impl->object_track_remain; i < Impl::ObjectTrack::size; ++i) {
                auto &obj = t->objects[i];
                backing.deallocate(obj.ptr, obj.len);
            }
            backing.destroy(t);
        }

        // previous tracks are full
        for (auto t = prev; t != nullptr;) {
            for (size_t i = 0; i < Impl::ObjectTrack::size; ++i) {
                auto &obj = t->objects[i];
                backing.deallocate(obj.ptr, obj.len);
            }
            prev = t->prev;
            backing.destroy(t);
            t = prev;
        }
    }
}

ArenaAllocator *ArenaAllocator::construct(mem::Allocator *allocator, mem::Allocator *backing, const char *name) {
    auto p = new(allocator->create<ArenaAllocator>()) ArenaAllocator;
    p->init(backing, name);
    return p;
}

void ArenaAllocator::destruct(mem::Allocator *allocator) {
    this->deinit();
    allocator->destroy(this);
}

void *ArenaAllocator::internal_allocate(const mem::TypeInfo &info, size_t count) {
    return this->impl->allocate(info, count);
}

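// the nonzero variant shares the zeroing path: segments and large objects
// are obtained through the zero-filling create()/allocate() interfaces of
// the backing allocator anyway, so a separate path would save nothing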
void *ArenaAllocator::internal_allocate_nonzero(const mem::TypeInfo &info, size_t count) {
    return this->impl->allocate(info, count);
}

void *ArenaAllocator::internal_reallocate(const mem::TypeInfo &info, void *old_ptr, size_t old_count, size_t new_count) {
    return this->internal_reallocate_nonzero(info, old_ptr, old_count, new_count);
}

void *ArenaAllocator::internal_reallocate_nonzero(const mem::TypeInfo &info, void *old_ptr, size_t old_count, size_t new_count) {
    return this->impl->reallocate(info, old_ptr, old_count, new_count);
}

void ArenaAllocator::internal_deallocate(const mem::TypeInfo &info, void *ptr, size_t count) {
    // noop: arena memory is only released in bulk by deinit()
}
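// Example usage, a sketch using only the API above (Token stands in for
// any payload type):
//
//     auto *arena = heap::ArenaAllocator::construct(&heap::c_allocator,
//             &heap::c_allocator, "tokenizer");
//     Token *tok = arena->allocate<Token>(16); // bump-allocated from a segment
//     arena->destruct(&heap::c_allocator);     // releases every allocation at once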

BootstrapAllocator bootstrap_allocator_state;
mem::Allocator &bootstrap_allocator = bootstrap_allocator_state;

CAllocator c_allocator_state;
mem::Allocator &c_allocator = c_allocator_state;

} // namespace heap