1 /*
2 * Copyright (c) 2021 Calvin Rose
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a copy
5 * of this software and associated documentation files (the "Software"), to
6 * deal in the Software without restriction, including without limitation the
7 * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
8 * sell copies of the Software, and to permit persons to whom the Software is
9 * furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
13 *
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
17 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
18 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
19 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
20 * IN THE SOFTWARE.
21 */
22
23 #ifndef JANET_AMALG
24 #include "features.h"
25 #include <janet.h>
26 #include "state.h"
27 #include "symcache.h"
28 #include "gc.h"
29 #include "util.h"
30 #include "fiber.h"
31 #include "vector.h"
32 #endif
33
/* Helpers for marking the various gc types */
static void janet_mark_funcenv(JanetFuncEnv *env);
static void janet_mark_funcdef(JanetFuncDef *def);
static void janet_mark_function(JanetFunction *func);
static void janet_mark_array(JanetArray *array);
static void janet_mark_table(JanetTable *table);
static void janet_mark_struct(const JanetKV *st);
static void janet_mark_tuple(const Janet *tuple);
static void janet_mark_buffer(JanetBuffer *buffer);
static void janet_mark_string(const uint8_t *str);
static void janet_mark_fiber(JanetFiber *fiber);
static void janet_mark_abstract(void *adata);

/* Local state that is only temporary for gc */
/* Remaining recursion budget for janet_mark; when it hits 0, values are
 * pushed as gc roots instead of being marked recursively (see janet_mark
 * and the re-scan loop in janet_collect). Reset at the start of each
 * collection. */
static JANET_THREAD_LOCAL uint32_t depth = JANET_RECURSION_GUARD;
/* Snapshot of janet_vm.root_count taken at the start of a collection, so
 * roots added mid-mark (by the recursion-guard fallback) can be detected
 * and drained. */
static JANET_THREAD_LOCAL size_t orig_rootcount;
50
/* Hint to the GC that we may need to collect. Adds s bytes of allocation
 * "pressure"; janet_collect resets the counter to 0. (The threshold
 * comparison against janet_vm.gc_interval happens outside this file.) */
void janet_gcpressure(size_t s) {
    janet_vm.next_collection += s;
}
55
/* Mark a value as reachable, dispatching on its runtime type.
 * A recursion budget (`depth`) bounds C stack usage: while budget remains,
 * heap values are marked recursively; once exhausted, the value is pushed
 * onto the GC root array via janet_gcroot instead, and janet_collect drains
 * those extra roots in a later pass. */
void janet_mark(Janet x) {
    if (depth) {
        depth--;
        switch (janet_type(x)) {
            default:
                /* Immediate values (nil, booleans, numbers, pointers not
                 * managed by the gc) need no marking */
                break;
            case JANET_STRING:
            case JANET_KEYWORD:
            case JANET_SYMBOL:
                /* All three are interned string-like payloads with a shared
                 * header layout */
                janet_mark_string(janet_unwrap_string(x));
                break;
            case JANET_FUNCTION:
                janet_mark_function(janet_unwrap_function(x));
                break;
            case JANET_ARRAY:
                janet_mark_array(janet_unwrap_array(x));
                break;
            case JANET_TABLE:
                janet_mark_table(janet_unwrap_table(x));
                break;
            case JANET_STRUCT:
                janet_mark_struct(janet_unwrap_struct(x));
                break;
            case JANET_TUPLE:
                janet_mark_tuple(janet_unwrap_tuple(x));
                break;
            case JANET_BUFFER:
                janet_mark_buffer(janet_unwrap_buffer(x));
                break;
            case JANET_FIBER:
                janet_mark_fiber(janet_unwrap_fiber(x));
                break;
            case JANET_ABSTRACT:
                janet_mark_abstract(janet_unwrap_abstract(x));
                break;
        }
        depth++;
    } else {
        /* Recursion guard hit: defer by turning the value into a temporary
         * root that janet_collect will mark iteratively. */
        janet_gcroot(x);
    }
}
98
/* Mark a string, keyword, or symbol. Strings have no child values, so only
 * the gc header preceding the character data is marked. */
static void janet_mark_string(const uint8_t *str) {
    janet_gc_mark(janet_string_head(str));
}
102
/* Mark a buffer. The byte storage is owned by the buffer and freed in
 * janet_deinit_block, so only the buffer object itself needs marking. */
static void janet_mark_buffer(JanetBuffer *buffer) {
    janet_gc_mark(buffer);
}
106
janet_mark_abstract(void * adata)107 static void janet_mark_abstract(void *adata) {
108 #ifdef JANET_EV
109 /* Check if abstract type is a threaded abstract type. If it is, marking means
110 * updating the threaded_abstract table. */
111 if ((janet_abstract_head(adata)->gc.flags & JANET_MEM_TYPEBITS) == JANET_MEMORY_THREADED_ABSTRACT) {
112 janet_table_put(&janet_vm.threaded_abstracts, janet_wrap_abstract(adata), janet_wrap_true());
113 return;
114 }
115 #endif
116 if (janet_gc_reachable(janet_abstract_head(adata)))
117 return;
118 janet_gc_mark(janet_abstract_head(adata));
119 if (janet_abstract_head(adata)->type->gcmark) {
120 janet_abstract_head(adata)->type->gcmark(adata, janet_abstract_size(adata));
121 }
122 }
123
124 /* Mark a bunch of items in memory */
janet_mark_many(const Janet * values,int32_t n)125 static void janet_mark_many(const Janet *values, int32_t n) {
126 if (values == NULL)
127 return;
128 const Janet *end = values + n;
129 while (values < end) {
130 janet_mark(*values);
131 values += 1;
132 }
133 }
134
135 /* Mark a bunch of key values items in memory */
janet_mark_kvs(const JanetKV * kvs,int32_t n)136 static void janet_mark_kvs(const JanetKV *kvs, int32_t n) {
137 const JanetKV *end = kvs + n;
138 while (kvs < end) {
139 janet_mark(kvs->key);
140 janet_mark(kvs->value);
141 kvs++;
142 }
143 }
144
/* Mark an array and every element it currently holds. */
static void janet_mark_array(JanetArray *array) {
    if (!janet_gc_reachable(array)) {
        janet_gc_mark(array);
        janet_mark_many(array->data, array->count);
    }
}
151
/* Mark a table, its slots, and its whole prototype chain. The chain is
 * walked iteratively (not recursively) and stops early at any table that
 * is already marked. */
static void janet_mark_table(JanetTable *table) {
    while (NULL != table && !janet_gc_reachable(table)) {
        janet_gc_mark(table);
        /* Whole capacity, not just count: empty slots are nil/nil pairs. */
        janet_mark_kvs(table->data, table->capacity);
        table = table->proto;
    }
}
163
janet_mark_struct(const JanetKV * st)164 static void janet_mark_struct(const JanetKV *st) {
165 recur:
166 if (janet_gc_reachable(janet_struct_head(st)))
167 return;
168 janet_gc_mark(janet_struct_head(st));
169 janet_mark_kvs(st, janet_struct_capacity(st));
170 st = janet_struct_proto(st);
171 if (st) goto recur;
172 }
173
/* Mark a tuple and every element in it. */
static void janet_mark_tuple(const Janet *tuple) {
    if (!janet_gc_reachable(janet_tuple_head(tuple))) {
        janet_gc_mark(janet_tuple_head(tuple));
        janet_mark_many(tuple, janet_tuple_length(tuple));
    }
}
180
/* Helper to mark function environments.
 * An env is either "on stack" (offset > 0, values live inside a fiber's
 * stack) or detached (offset == 0, values owned by the env itself). The
 * detach check must happen after marking the env but before deciding which
 * representation to traverse. */
static void janet_mark_funcenv(JanetFuncEnv *env) {
    if (janet_gc_reachable(env))
        return;
    janet_gc_mark(env);
    /* If closure env references a dead fiber, we can just copy out the stack frame we need so
     * we don't need to keep around the whole dead fiber. */
    janet_env_maybe_detach(env);
    if (env->offset > 0) {
        /* On stack: keep the owning fiber (and thus the slots) alive. */
        janet_mark_fiber(env->as.fiber);
    } else {
        /* Not on stack: env owns a private copy of the values. */
        janet_mark_many(env->as.values, env->length);
    }
}
197
198 /* GC helper to mark a FuncDef */
janet_mark_funcdef(JanetFuncDef * def)199 static void janet_mark_funcdef(JanetFuncDef *def) {
200 int32_t i;
201 if (janet_gc_reachable(def))
202 return;
203 janet_gc_mark(def);
204 janet_mark_many(def->constants, def->constants_length);
205 for (i = 0; i < def->defs_length; ++i) {
206 janet_mark_funcdef(def->defs[i]);
207 }
208 if (def->source)
209 janet_mark_string(def->source);
210 if (def->name)
211 janet_mark_string(def->name);
212 }
213
/* Mark a function closure: its captured environments and its def. */
static void janet_mark_function(JanetFunction *func) {
    if (janet_gc_reachable(func))
        return;
    janet_gc_mark(func);
    /* def is only NULL for a partially constructed function. */
    if (NULL == func->def)
        return;
    int32_t numenvs = func->def->environments_length;
    for (int32_t i = 0; i < numenvs; ++i) {
        janet_mark_funcenv(func->envs[i]);
    }
    janet_mark_funcdef(func->def);
}
229
/* Mark a fiber: its pending arguments, every call frame (function, env, and
 * local slots), its environment table, and - iteratively - any child fiber
 * it is currently resuming. */
static void janet_mark_fiber(JanetFiber *fiber) {
    int32_t i, j;
    JanetStackFrame *frame;
recur:
    if (janet_gc_reachable(fiber))
        return;
    janet_gc_mark(fiber);

    janet_mark(fiber->last_value);

    /* Mark values on the argument stack (args staged for the next call,
     * between stackstart and stacktop) */
    janet_mark_many(fiber->data + fiber->stackstart,
                    fiber->stacktop - fiber->stackstart);

    /* Walk call frames newest to oldest. i is the slot index of the current
     * frame's locals; j is one past the last local of that frame (the next
     * frame header starts JANET_FRAME_SIZE slots before stackstart/i). */
    i = fiber->frame;
    j = fiber->stackstart - JANET_FRAME_SIZE;
    while (i > 0) {
        frame = (JanetStackFrame *)(fiber->data + i - JANET_FRAME_SIZE);
        if (NULL != frame->func)
            janet_mark_function(frame->func);
        if (NULL != frame->env)
            janet_mark_funcenv(frame->env);
        /* Mark all values in the stack frame */
        janet_mark_many(fiber->data + i, j - i);
        j = i - JANET_FRAME_SIZE;
        i = frame->prevframe;
    }

    if (fiber->env)
        janet_mark_table(fiber->env);

#ifdef JANET_EV
    if (fiber->supervisor_channel) {
        janet_mark_abstract(fiber->supervisor_channel);
    }
#endif

    /* Explicit tail recursion: mark the child fiber without growing the C
     * stack */
    if (fiber->child) {
        fiber = fiber->child;
        goto recur;
    }
}
273
/* Deinitialize a block of memory: release any out-of-band resources the
 * object owns before the block itself is freed by the sweeper. Does not
 * free `mem` itself. */
static void janet_deinit_block(JanetGCObject *mem) {
    switch (mem->flags & JANET_MEM_TYPEBITS) {
        default:
        case JANET_MEMORY_FUNCTION:
            break; /* Do nothing for non gc types */
        case JANET_MEMORY_SYMBOL:
            /* Remove from the global symbol intern cache before freeing */
            janet_symbol_deinit(((JanetStringHead *) mem)->data);
            break;
        case JANET_MEMORY_ARRAY:
            janet_free(((JanetArray *) mem)->data);
            break;
        case JANET_MEMORY_TABLE:
            janet_free(((JanetTable *) mem)->data);
            break;
        case JANET_MEMORY_FIBER:
            janet_free(((JanetFiber *)mem)->data);
            break;
        case JANET_MEMORY_BUFFER:
            janet_buffer_deinit((JanetBuffer *) mem);
            break;
        case JANET_MEMORY_ABSTRACT: {
            /* Run the abstract type's finalizer, if any. A non-zero return
             * from the finalizer is treated as a fatal error. */
            JanetAbstractHead *head = (JanetAbstractHead *)mem;
            if (head->type->gc) {
                janet_assert(!head->type->gc(head->data, head->size), "finalizer failed");
            }
        }
        break;
        case JANET_MEMORY_FUNCENV: {
            /* Only a detached env (offset == 0) owns its value array; an
             * on-stack env borrows a fiber's stack. */
            JanetFuncEnv *env = (JanetFuncEnv *)mem;
            if (0 == env->offset)
                janet_free(env->as.values);
        }
        break;
        case JANET_MEMORY_FUNCDEF: {
            JanetFuncDef *def = (JanetFuncDef *)mem;
            /* TODO - get this all with one alloc and one free */
            janet_free(def->defs);
            janet_free(def->environments);
            janet_free(def->constants);
            janet_free(def->bytecode);
            janet_free(def->sourcemap);
            janet_free(def->closure_bitset);
        }
        break;
    }
}
321
/* Iterate over all allocated memory, and free memory that is not
 * marked as reachable. Flip the gc color flag for next sweep. */
void janet_sweep() {
    JanetGCObject *previous = NULL;
    JanetGCObject *current = janet_vm.blocks;
    JanetGCObject *next;
    while (NULL != current) {
        next = current->data.next;
        if (current->flags & (JANET_MEM_REACHABLE | JANET_MEM_DISABLED)) {
            /* Survivor: clear the mark so the next cycle starts unmarked */
            previous = current;
            current->flags &= ~JANET_MEM_REACHABLE;
        } else {
            /* Garbage: unlink from the singly-linked heap list and free */
            janet_vm.block_count--;
            janet_deinit_block(current);
            if (NULL != previous) {
                previous->data.next = next;
            } else {
                janet_vm.blocks = next;
            }
            janet_free(current);
        }
        current = next;
    }
#ifdef JANET_EV
    /* Sweep threaded abstract types for references to decrement */
    JanetKV *items = janet_vm.threaded_abstracts.data;
    for (int32_t i = 0; i < janet_vm.threaded_abstracts.capacity; i++) {
        if (janet_checktype(items[i].key, JANET_ABSTRACT)) {

            /* If item was not visited during the mark phase, then this
             * abstract type isn't present in the heap and needs its refcount
             * decremented, and should be removed from table. If the refcount is
             * then 0, the item will be collected. This ensures that only one interpreter
             * will clean up the threaded abstract. */

            /* If not visited... */
            if (!janet_truthy(items[i].value)) {
                void *abst = janet_unwrap_abstract(items[i].key);
                if (0 == janet_abstract_decref(abst)) {
                    /* Run finalizer */
                    JanetAbstractHead *head = janet_abstract_head(abst);
                    if (head->type->gc) {
                        janet_assert(!head->type->gc(head->data, head->size), "finalizer failed");
                    }
                    /* Mark as tombstone in place */
                    items[i].key = janet_wrap_nil();
                    items[i].value = janet_wrap_false();
                    janet_vm.threaded_abstracts.deleted++;
                    janet_vm.threaded_abstracts.count--;
                    /* Free memory */
                    janet_free(janet_abstract_head(abst));
                }
            }

            /* Reset the "visited" flag for the next sweep */
            items[i].value = janet_wrap_false();
        }
    }
#endif
}
382
383 /* Allocate some memory that is tracked for garbage collection */
janet_gcalloc(enum JanetMemoryType type,size_t size)384 void *janet_gcalloc(enum JanetMemoryType type, size_t size) {
385 JanetGCObject *mem;
386
387 /* Make sure everything is inited */
388 janet_assert(NULL != janet_vm.cache, "please initialize janet before use");
389 mem = janet_malloc(size);
390
391 /* Check for bad malloc */
392 if (NULL == mem) {
393 JANET_OUT_OF_MEMORY;
394 }
395
396 /* Configure block */
397 mem->flags = type;
398
399 /* Prepend block to heap list */
400 janet_vm.next_collection += size;
401 mem->data.next = janet_vm.blocks;
402 janet_vm.blocks = mem;
403 janet_vm.block_count++;
404
405 return (void *)mem;
406 }
407
/* Run a scratch allocation's finalizer (if set) and release it. */
static void free_one_scratch(JanetScratch *s) {
    if (s->finalize != NULL) {
        s->finalize((char *) s->mem);
    }
    janet_free(s);
}
414
415 /* Free all allocated scratch memory */
janet_free_all_scratch(void)416 static void janet_free_all_scratch(void) {
417 for (size_t i = 0; i < janet_vm.scratch_len; i++) {
418 free_one_scratch(janet_vm.scratch_mem[i]);
419 }
420 janet_vm.scratch_len = 0;
421 }
422
janet_mem2scratch(void * mem)423 static JanetScratch *janet_mem2scratch(void *mem) {
424 JanetScratch *s = (JanetScratch *)mem;
425 return s - 1;
426 }
427
/* Run garbage collection: mark from the root fiber and the root array,
 * drain any roots pushed by the recursion-guard fallback, then sweep.
 * No-op while the GC is suspended via janet_gclock. */
void janet_collect(void) {
    uint32_t i;
    if (janet_vm.gc_suspend) return;
    /* Reset the per-collection recursion budget for janet_mark */
    depth = JANET_RECURSION_GUARD;
    /* Try and prevent many major collections back to back.
     * A full collection will take O(janet_vm.block_count) time.
     * If we have a large heap, make sure our interval is not too
     * small so we won't make many collections over it. This is just a
     * heuristic for automatically changing the gc interval */
    if (janet_vm.block_count * 8 > janet_vm.gc_interval) {
        janet_vm.gc_interval = janet_vm.block_count * sizeof(JanetGCObject);
    }
    orig_rootcount = janet_vm.root_count;
#ifdef JANET_EV
    janet_ev_mark();
#endif
    janet_mark_fiber(janet_vm.root_fiber);
    for (i = 0; i < orig_rootcount; i++)
        janet_mark(janet_vm.roots[i]);
    /* Drain roots pushed during marking when the recursion guard was hit;
     * popping may push more, so loop until the count returns to the
     * original snapshot. */
    while (orig_rootcount < janet_vm.root_count) {
        Janet x = janet_vm.roots[--janet_vm.root_count];
        janet_mark(x);
    }
    janet_sweep();
    janet_vm.next_collection = 0;
    janet_free_all_scratch();
}
456
457 /* Add a root value to the GC. This prevents the GC from removing a value
458 * and all of its children. If gcroot is called on a value n times, unroot
459 * must also be called n times to remove it as a gc root. */
janet_gcroot(Janet root)460 void janet_gcroot(Janet root) {
461 size_t newcount = janet_vm.root_count + 1;
462 if (newcount > janet_vm.root_capacity) {
463 size_t newcap = 2 * newcount;
464 janet_vm.roots = janet_realloc(janet_vm.roots, sizeof(Janet) * newcap);
465 if (NULL == janet_vm.roots) {
466 JANET_OUT_OF_MEMORY;
467 }
468 janet_vm.root_capacity = newcap;
469 }
470 janet_vm.roots[janet_vm.root_count] = root;
471 janet_vm.root_count = newcount;
472 }
473
474 /* Identity equality for GC purposes */
janet_gc_idequals(Janet lhs,Janet rhs)475 static int janet_gc_idequals(Janet lhs, Janet rhs) {
476 if (janet_type(lhs) != janet_type(rhs))
477 return 0;
478 switch (janet_type(lhs)) {
479 case JANET_BOOLEAN:
480 case JANET_NIL:
481 case JANET_NUMBER:
482 /* These values don't really matter to the gc so returning 1 all the time is fine. */
483 return 1;
484 default:
485 return janet_unwrap_pointer(lhs) == janet_unwrap_pointer(rhs);
486 }
487 }
488
489 /* Remove a root value from the GC. This allows the gc to potentially reclaim
490 * a value and all its children. */
janet_gcunroot(Janet root)491 int janet_gcunroot(Janet root) {
492 Janet *vtop = janet_vm.roots + janet_vm.root_count;
493 /* Search from top to bottom as access is most likely LIFO */
494 for (Janet *v = janet_vm.roots; v < vtop; v++) {
495 if (janet_gc_idequals(root, *v)) {
496 *v = janet_vm.roots[--janet_vm.root_count];
497 return 1;
498 }
499 }
500 return 0;
501 }
502
503 /* Remove a root value from the GC. This sets the effective reference count to 0. */
janet_gcunrootall(Janet root)504 int janet_gcunrootall(Janet root) {
505 Janet *vtop = janet_vm.roots + janet_vm.root_count;
506 int ret = 0;
507 /* Search from top to bottom as access is most likely LIFO */
508 for (Janet *v = janet_vm.roots; v < vtop; v++) {
509 if (janet_gc_idequals(root, *v)) {
510 *v = janet_vm.roots[--janet_vm.root_count];
511 vtop--;
512 ret = 1;
513 }
514 }
515 return ret;
516 }
517
/* Free all allocated memory. Called on VM shutdown: releases threaded
 * abstracts this interpreter holds a reference to, every heap block, and
 * all scratch memory. */
void janet_clear_memory(void) {
#ifdef JANET_EV
    /* Drop this interpreter's reference on each threaded abstract; the last
     * interpreter to decref (count reaches 0) runs the finalizer and frees. */
    JanetKV *items = janet_vm.threaded_abstracts.data;
    for (int32_t i = 0; i < janet_vm.threaded_abstracts.capacity; i++) {
        if (janet_checktype(items[i].key, JANET_ABSTRACT)) {
            void *abst = janet_unwrap_abstract(items[i].key);
            if (0 == janet_abstract_decref(abst)) {
                JanetAbstractHead *head = janet_abstract_head(abst);
                if (head->type->gc) {
                    janet_assert(!head->type->gc(head->data, head->size), "finalizer failed");
                }
                janet_free(janet_abstract_head(abst));
            }
        }
    }
#endif
    /* Deinit and free every block on the heap list unconditionally */
    JanetGCObject *current = janet_vm.blocks;
    while (NULL != current) {
        janet_deinit_block(current);
        JanetGCObject *next = current->data.next;
        janet_free(current);
        current = next;
    }
    janet_vm.blocks = NULL;
    janet_free_all_scratch();
    janet_free(janet_vm.scratch_mem);
}
546
547 /* Primitives for suspending GC. */
janet_gclock(void)548 int janet_gclock(void) {
549 return janet_vm.gc_suspend++;
550 }
/* Restore the suspend count saved by the matching janet_gclock, releasing
 * that lock (and any locks taken after it). */
void janet_gcunlock(int handle) {
    janet_vm.gc_suspend = handle;
}
554
555 /* Scratch memory API */
556
janet_smalloc(size_t size)557 void *janet_smalloc(size_t size) {
558 JanetScratch *s = janet_malloc(sizeof(JanetScratch) + size);
559 if (NULL == s) {
560 JANET_OUT_OF_MEMORY;
561 }
562 s->finalize = NULL;
563 if (janet_vm.scratch_len == janet_vm.scratch_cap) {
564 size_t newcap = 2 * janet_vm.scratch_cap + 2;
565 JanetScratch **newmem = (JanetScratch **) janet_realloc(janet_vm.scratch_mem, newcap * sizeof(JanetScratch));
566 if (NULL == newmem) {
567 JANET_OUT_OF_MEMORY;
568 }
569 janet_vm.scratch_cap = newcap;
570 janet_vm.scratch_mem = newmem;
571 }
572 janet_vm.scratch_mem[janet_vm.scratch_len++] = s;
573 return (char *)(s->mem);
574 }
575
janet_scalloc(size_t nmemb,size_t size)576 void *janet_scalloc(size_t nmemb, size_t size) {
577 if (nmemb && size > SIZE_MAX / nmemb) {
578 JANET_OUT_OF_MEMORY;
579 }
580 size_t n = nmemb * size;
581 void *p = janet_smalloc(n);
582 memset(p, 0, n);
583 return p;
584 }
585
/* Resize a scratch allocation. mem must have come from janet_smalloc (or
 * be NULL, in which case this is a plain janet_smalloc). Aborts if mem is
 * not a live scratch pointer. */
void *janet_srealloc(void *mem, size_t size) {
    if (NULL == mem)
        return janet_smalloc(size);
    JanetScratch *s = janet_mem2scratch(mem);
    /* Search newest-first, since recent allocations are the likeliest to be
     * resized */
    for (size_t i = janet_vm.scratch_len; i > 0; i--) {
        if (janet_vm.scratch_mem[i - 1] == s) {
            JanetScratch *grown = janet_realloc(s, size + sizeof(JanetScratch));
            if (NULL == grown) {
                JANET_OUT_OF_MEMORY;
            }
            janet_vm.scratch_mem[i - 1] = grown;
            return (char *)(grown->mem);
        }
    }
    JANET_EXIT("invalid janet_srealloc");
}
604
/* Attach a finalizer to a scratch allocation; it runs when the allocation
 * is freed (explicitly or at collection time). */
void janet_sfinalizer(void *mem, JanetScratchFinalizer finalizer) {
    janet_mem2scratch(mem)->finalize = finalizer;
}
609
janet_sfree(void * mem)610 void janet_sfree(void *mem) {
611 if (NULL == mem) return;
612 JanetScratch *s = janet_mem2scratch(mem);
613 if (janet_vm.scratch_len) {
614 for (size_t i = janet_vm.scratch_len - 1; ; i--) {
615 if (janet_vm.scratch_mem[i] == s) {
616 janet_vm.scratch_mem[i] = janet_vm.scratch_mem[--janet_vm.scratch_len];
617 free_one_scratch(s);
618 return;
619 }
620 if (i == 0) break;
621 }
622 }
623 JANET_EXIT("invalid janet_sfree");
624 }
625