1 // Copyright 2014 Wouter van Oortmerssen. All rights reserved.
2 //
3 // Licensed under the Apache License, Version 2.0 (the "License");
4 // you may not use this file except in compliance with the License.
5 // You may obtain a copy of the License at
6 //
7 // http://www.apache.org/licenses/LICENSE-2.0
8 //
9 // Unless required by applicable law or agreed to in writing, software
10 // distributed under the License is distributed on an "AS IS" BASIS,
11 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 // See the License for the specific language governing permissions and
13 // limitations under the License.
14
15 #ifndef LOBSTER_VMDATA
16 #define LOBSTER_VMDATA
17
18 #include "il.h"
19
20 namespace bytecode { struct BytecodeFile; } // FIXME
21
22 namespace lobster {
23
24 #ifndef NDEBUG
25 #define RTT_ENABLED 1
26 #define RTT_TYPE_ERRORS 1
27 #else
28 #define RTT_ENABLED 0
29 #define RTT_TYPE_ERRORS 0
30 #endif
31
32 // These are used with VM_COMPILED_CODE_MODE
33 #define VM_DISPATCH_TRAMPOLINE 1
34 #define VM_DISPATCH_SWITCH_GOTO 2
35 #define VM_DISPATCH_METHOD VM_DISPATCH_TRAMPOLINE
36
37 #define STRING_CONSTANTS_KEEP 0
38
39 #define DELETE_DELAY 0
40
41 // Typedefs to make pointers and scalars the same size.
42 #if _WIN64 || __amd64__ || __x86_64__ || __ppc64__ || __LP64__
43 #if !defined(VM_COMPILED_CODE_MODE) || VM_DISPATCH_METHOD != VM_DISPATCH_TRAMPOLINE
44 //#define FORCE_32_BIT_MODEL
45 #endif
46 #ifndef FORCE_32_BIT_MODEL
47 #define VALUE_MODEL_64 1
48 #else
49 #define VALUE_MODEL_64 0
50 #endif
51 #else
52 #define VALUE_MODEL_64 0
53 #endif
54
// Pointer-sized scalar typedefs: in the 64-bit value model ints/floats are 64-bit,
// otherwise 32-bit. Either way a Value's scalar members are the same size as a
// pointer (checked by the static_assert further below).
#if VALUE_MODEL_64
typedef int64_t intp;
typedef uint64_t uintp;
typedef double floatp;
#else
typedef int32_t intp;
typedef uint32_t uintp;
typedef float floatp;
#endif

// Vector aliases for the language's float/int vector types at the chosen precision.
typedef vec<floatp, 2> floatp2;
typedef vec<floatp, 3> floatp3;
typedef vec<floatp, 4> floatp4;

typedef vec<intp, 2> intp2;
typedef vec<intp, 3> intp3;
typedef vec<intp, 4> intp4;

// Commonly used zero/one constants for the above.
const floatp4 floatp4_0 = floatp4(0.0f);
const floatp3 floatp3_0 = floatp3(0.0f);
const floatp2 floatp2_0 = floatp2(0.0f);

const intp2 intp2_0 = intp2((intp)0);
const intp2 intp2_1 = intp2(1);
const intp3 intp3_0 = intp3((intp)0);
80
81
// Runtime (and type-checker) type tags. The numeric ordering is load-bearing:
// the IsRef/IsRefNil/IsRuntime predicates below rely on reference types being
// negative, V_NIL being 0, and compile-time-only types sorting after V_VAR.
enum ValueType : int {
    // refc types are negative
    V_MINVMTYPES = -10,
    V_ANY = -9,              // any other reference type.
    V_STACKFRAMEBUF = -8,
    V_VALUEBUF = -7,         // only used as memory type for vector/coro buffers, not used by Value.
    V_STRUCT_R = -6,
    V_RESOURCE = -5,
    V_COROUTINE = -4,
    V_STRING = -3,
    V_CLASS = -2,
    V_VECTOR = -1,
    V_NIL = 0,               // VM: null reference, Type checker: nillable.
    V_INT,
    V_FLOAT,
    V_FUNCTION,
    V_YIELD,
    V_STRUCT_S,
    V_VAR,                   // [typechecker only] like V_ANY, except idx refers to a type variable
    V_TYPEID,                // [typechecker only] a typetable offset.
    V_VOID,                  // [typechecker/codegen only] this exp does not produce a value.
    V_TUPLE,                 // [typechecker/codegen only] this exp produces >1 value.
    V_UNDEFINED,             // [typechecker only] this type should never be accessed.
    V_MAXVMTYPES
};
107
IsScalar(ValueType t)108 inline bool IsScalar(ValueType t) { return t == V_INT || t == V_FLOAT; }
IsUnBoxed(ValueType t)109 inline bool IsUnBoxed(ValueType t) { return t == V_INT || t == V_FLOAT || t == V_FUNCTION; }
IsRef(ValueType t)110 inline bool IsRef(ValueType t) { return t < V_NIL; }
IsRefNil(ValueType t)111 inline bool IsRefNil(ValueType t) { return t <= V_NIL; }
IsRefNilVar(ValueType t)112 inline bool IsRefNilVar(ValueType t) { return t <= V_NIL || t == V_VAR; }
IsRefNilStruct(ValueType t)113 inline bool IsRefNilStruct(ValueType t) { return t <= V_NIL || t == V_STRUCT_S; }
IsRefNilNoStruct(ValueType t)114 inline bool IsRefNilNoStruct(ValueType t) { return t <= V_NIL && t != V_STRUCT_R; }
IsRuntime(ValueType t)115 inline bool IsRuntime(ValueType t) { return t < V_VAR; }
IsRuntimePrintable(ValueType t)116 inline bool IsRuntimePrintable(ValueType t) { return t <= V_FLOAT; }
IsStruct(ValueType t)117 inline bool IsStruct(ValueType t) { return t == V_STRUCT_R || t == V_STRUCT_S; }
IsUDT(ValueType t)118 inline bool IsUDT(ValueType t) { return t == V_CLASS || IsStruct(t); }
IsNillable(ValueType t)119 inline bool IsNillable(ValueType t) { return IsRef(t) && t != V_STRUCT_R; }
120
BaseTypeName(ValueType t)121 inline string_view BaseTypeName(ValueType t) {
122 static const char *typenames[] = {
123 "any", "<stackframe_buffer>", "<value_buffer>",
124 "struct_ref",
125 "resource", "coroutine", "string", "class", "vector",
126 "nil", "int", "float", "function", "yield_function", "struct_scalar",
127 "variable", "typeid", "void",
128 "tuple", "undefined",
129 };
130 if (t <= V_MINVMTYPES || t >= V_MAXVMTYPES) {
131 assert(false);
132 return "<internal-error-type>";
133 }
134 return typenames[t - V_MINVMTYPES - 1];
135 }
136
enum type_elem_t : int {  // Strongly typed element of typetable.
    // These must correspond to typetable init in Codegen constructor.
    // Entries are not all 1 apart because some types occupy multiple slots
    // (e.g. INT carries an enumidx, vectors carry their element type).
    TYPE_ELEM_INT = 0,    // This has -1 for its enumidx.
    TYPE_ELEM_FLOAT = 2,
    TYPE_ELEM_STRING = 3,
    TYPE_ELEM_RESOURCE = 4,
    TYPE_ELEM_ANY = 5,
    TYPE_ELEM_VALUEBUF = 6,
    TYPE_ELEM_STACKFRAMEBUF = 7,
    TYPE_ELEM_VECTOR_OF_INT = 8,  // 2 each.
    TYPE_ELEM_VECTOR_OF_FLOAT = 10,
    TYPE_ELEM_VECTOR_OF_STRING = 12,
    TYPE_ELEM_VECTOR_OF_VECTOR_OF_INT = 14,
    TYPE_ELEM_VECTOR_OF_VECTOR_OF_FLOAT = 16,

    TYPE_ELEM_FIXED_OFFSET_END = 18
};
154
155 struct VM;
156
// One entry in the VM's typetable: a runtime type descriptor. Which union
// member is active depends on `t`.
struct TypeInfo {
    ValueType t;
    union {
        type_elem_t subt;  // V_VECTOR | V_NIL
        struct {           // V_CLASS, V_STRUCT_*
            int structidx;
            int len;
            int vtable_start;
            type_elem_t elemtypes[1];  // len elems, followed by len parent types.
        };
        int enumidx;       // V_INT, -1 if not an enum.
        int sfidx;         // V_FUNCTION;
        struct {           // V_COROUTINE
            int cofunidx;
            type_elem_t yieldtype;
        };
    };

    // Never constructed/copied directly: instances live inside the typetable
    // and are variable-length (elemtypes is a trailing array).
    TypeInfo() = delete;
    TypeInfo(const TypeInfo &) = delete;
    TypeInfo &operator=(const TypeInfo &) = delete;

    string Debug(VM &vm, bool rec = true) const;

    // For field i of a class/struct: return the parent's type for that field
    // (stored at elemtypes[len + i]) when valid (>= 0), else the own elem type.
    type_elem_t GetElemOrParent(intp i) const {
        auto pti = elemtypes[len + i];
        return pti >= 0 ? pti : elemtypes[i];
    }
};
186
187 struct Value;
188 struct LString;
189 struct LVector;
190 struct LObject;
191 struct LCoRoutine;
192
// Options controlling value-to-text conversion (see the various ToString
// implementations on Value/LString/LObject/LVector).
struct PrintPrefs {
    intp depth;           // Nesting depth limit — NOTE(review): semantics per ToString impls.
    intp budget;          // Output size limit — NOTE(review): semantics per ToString impls.
    bool quoted;          // Whether to quote strings.
    intp decimals;        // Float decimals; -1 presumably means default formatting.
    int cycles = -1;      // >= 0 enables cycle detection (see RefObj::CycleCheck).
    int indent = 0;
    int cur_indent = 0;

    PrintPrefs(intp _depth, intp _budget, bool _quoted, intp _decimals)
        : depth(_depth), budget(_budget), quoted(_quoted), decimals(_decimals) {}
};
205
// A compiled code block. In trampoline mode it is a function taking the VM and
// returning a void* (presumably the next block to run — confirm in the dispatch
// loop); in switch/goto mode it is just an offset.
typedef void *(*block_base_t)(VM &);
#if VM_DISPATCH_METHOD == VM_DISPATCH_TRAMPOLINE
    typedef block_base_t block_t;
#elif VM_DISPATCH_METHOD == VM_DISPATCH_SWITCH_GOTO
    typedef intp block_t;
#endif
212
213 // ANY memory allocated by the VM must inherit from this, so we can identify leaked memory
// ANY memory allocated by the VM must inherit from this, so we can identify leaked memory
struct DynAlloc {
    type_elem_t tti;  // offset into the VM's typetable
    const TypeInfo &ti(VM &vm) const;  // Defined after VM below (needs GetTypeInfo).

    DynAlloc(type_elem_t _tti) : tti(_tti) {}
};
220
// Base for all reference-counted heap objects.
struct RefObj : DynAlloc {
    // Reference count; starts at 1 for the creating owner. Temporarily holds a
    // negative "visit id" while printing cyclic structures (see CycleCheck).
    int refc = 1;

    #if DELETE_DELAY
    const int *alloc_ip;  // Instruction pointer at allocation, for diagnostics.
    #endif

    RefObj(type_elem_t _tti) : DynAlloc(_tti)
        #if DELETE_DELAY
        , alloc_ip(nullptr)
        #endif
        {}

    void Inc() {
        #ifndef NDEBUG
        if (refc <= 0) {  // Should never be "re-vived".
            #if DELETE_DELAY
            LOG_DEBUG("revive: ", (size_t)this);
            #endif
            assert(false);
        }
        #endif
        #if DELETE_DELAY
        LOG_DEBUG("inc: ", (size_t)this);
        #endif
        refc++;
    }

    void Dec(VM &vm) {
        refc--;
        #ifndef NDEBUG
        DECSTAT(vm);  // Debug-only allocation statistics.
        #endif
        #if DELETE_DELAY
        LOG_DEBUG("dec: ", (size_t)this);
        #endif
        if (refc <= 0) {
            DECDELETE(vm);  // Last reference gone.
        }
    }

    // Emit the placeholder for an object already seen while printing a cycle.
    void CycleStr(ostringstream &ss) const { ss << "_" << -refc << "_"; }

    // When cycle detection is on: returns true (and prints a placeholder) if this
    // object was already visited, else marks it visited with a negative visit id.
    bool CycleCheck(ostringstream &ss, PrintPrefs &pp) {
        if (pp.cycles >= 0) {
            if (refc < 0) { CycleStr(ss); return true; }
            refc = -(++pp.cycles);
        }
        return false;
    }

    void DECDELETE(VM &vm);
    void DECDELETENOW(VM &vm);
    void DECSTAT(VM &vm);

    intp Hash(VM &vm);
};
278
279 extern bool RefEqual(VM &vm, const RefObj *a, const RefObj *b, bool structural);
280 extern void RefToString(VM &vm, ostringstream &ss, const RefObj *ro, PrintPrefs &pp);
281
// VM string object. The character data is allocated inline, directly after
// this header (see data()).
struct LString : RefObj {
    intp len;  // has to match the Value integer type, since we allow the length to be obtained
    LString(intp _l);

    const char *data() const { return (char *)(this + 1); }
    string_view strv() const { return string_view(data(), len); }

    void ToString(ostringstream &ss, PrintPrefs &pp);

    void DeleteSelf(VM &vm);

    // Comparisons are by contents, not identity.
    bool operator==(LString &o) { return strv() == o.strv(); }
    bool operator!=(LString &o) { return strv() != o.strv(); }
    bool operator< (LString &o) { return strv() <  o.strv(); }
    bool operator<=(LString &o) { return strv() <= o.strv(); }
    bool operator> (LString &o) { return strv() >  o.strv(); }
    bool operator>=(LString &o) { return strv() >= o.strv(); }

    intp Hash();
};
302
// There must be a single of these per type, since they are compared by pointer.
struct ResourceType {
    const char *name;             // Shown in LResource::ToString.
    void (* deletefun)(void *);   // Cleanup callback for the wrapped value.
};
308
// A refcounted wrapper around an opaque host/native resource (val), tagged
// with its ResourceType.
struct LResource : RefObj {
    void *val;
    const ResourceType *type;

    LResource(void *v, const ResourceType *t);

    void DeleteSelf(VM &vm);

    void ToString(ostringstream &ss) {
        ss << "(resource:" << type->name << ")";
    }
};
321
// An instruction pointer: a block pointer in compiled-code mode, otherwise a
// bytecode offset. Always exactly the size of intp (so it fits in a Value).
struct InsPtr {
    #ifdef VM_COMPILED_CODE_MODE
    block_t f;
    explicit InsPtr(block_t _f) : f(_f) {}
    static_assert(sizeof(block_t) == sizeof(intp), "");
    #else
    intp f;
    explicit InsPtr(intp _f) : f(_f) {}
    #ifdef FORCE_32_BIT_MODEL
    explicit InsPtr(ptrdiff_t _f) : f((intp)_f) {}
    #endif
    #endif
    InsPtr() : f(0) {}
    bool operator==(const InsPtr o) const { return f == o.f; }
    bool operator!=(const InsPtr o) const { return f != o.f; }
};
338
339 #if RTT_ENABLED
340 #define TYPE_INIT(t) type(t),
341 #else
342 #define TYPE_INIT(t)
343 #endif
344
// These pointer types are for use inside Value below. In most other parts of the code we
// use naked pointers.
#ifndef FORCE_32_BIT_MODEL
// We use regular pointers of the current architecture.
typedef LString *LStringPtr;
typedef LVector *LVectorPtr;
typedef LObject *LStructPtr;
typedef LCoRoutine *LCoRoutinePtr;
typedef LResource *LResourcePtr;
typedef RefObj *RefObjPtr;
#else
// We use a compressed pointer to fit in 32-bit on a 64-bit build.
// These are shifted by COMPRESS_BITS, so for 3 we can address the bottom 32GB of the
// address space. The memory allocator for these values must guarantee we only allocate
// from that region, by using mmap or similar.
template<typename T> class CompressedPtr {
    uint32_t c;
    enum { COMPRESS_BITS = 3, COMPRESS_MASK = (1 << COMPRESS_BITS) - 1 };
  public:
    CompressedPtr(const T *p) {
        auto bits = (size_t)p;
        assert(!(bits & COMPRESS_MASK));  // Must not have low bits set.
        bits >>= COMPRESS_BITS;
        assert(!(bits >> 32));  // Must not have high bits set.
        c = (uint32_t)bits;
    }
    // Decompress: shift the stored 32-bit value back up into a real pointer.
    T *get() const { return (T *)(((size_t)c) << COMPRESS_BITS); }
    operator T *() const { return get(); }
    T *operator->() const { return get(); }
};
typedef CompressedPtr<LString> LStringPtr;
typedef CompressedPtr<LVector> LVectorPtr;
typedef CompressedPtr<LObject> LStructPtr;
typedef CompressedPtr<LCoRoutine> LCoRoutinePtr;
typedef CompressedPtr<LResource> LResourcePtr;
typedef CompressedPtr<BoxedInt> BoxedIntPtr;
typedef CompressedPtr<BoxedFloat> BoxedFloatPtr;
typedef CompressedPtr<RefObj> RefObjPtr;
#endif

// A Value's union members must all be the same size for the tagging to work.
static_assert(sizeof(intp) == sizeof(floatp) && sizeof(intp) == sizeof(RefObjPtr),
              "typedefs need fixing");
387
388 struct Value {
389 #if RTT_ENABLED
390 ValueType type;
391 #endif
392
393 private:
394 union {
395 // All these types can be defined to be either all 32 or 64-bit, depending on the
396 // compilation mode.
397
398 // Non-reference values.
399 intp ival_; // scalars stored as pointer-sized versions.
400 floatp fval_;
401 InsPtr ip_;
402
403 // Reference values (includes NULL if nillable version).
404 LStringPtr sval_;
405 LVectorPtr vval_;
406 LStructPtr oval_;
407 LCoRoutinePtr cval_;
408 LResourcePtr xval_;
409
410 // Generic reference access.
411 RefObjPtr ref_;
412
413 // Temp: for inline structs.
414 TypeInfo *ti_;
415 };
416 public:
417
418 // These asserts help track down any invalid code generation issues.
ivalValue419 intp ival () const { assert(type == V_INT); return ival_; }
fvalValue420 floatp fval () const { assert(type == V_FLOAT); return fval_; }
intvalValue421 int intval() const { assert(type == V_INT); return (int)ival_; }
fltvalValue422 float fltval() const { assert(type == V_FLOAT); return (float)fval_; }
svalValue423 LString *sval () const { assert(type == V_STRING); return sval_; }
vvalValue424 LVector *vval () const { assert(type == V_VECTOR); return vval_; }
ovalValue425 LObject *oval () const { assert(type == V_CLASS); return oval_; }
cvalValue426 LCoRoutine *cval () const { assert(type == V_COROUTINE); return cval_; }
xvalValue427 LResource *xval () const { assert(type == V_RESOURCE); return xval_; }
refValue428 RefObj *ref () const { assert(IsRef(type)); return ref_; }
refnilValue429 RefObj *refnil() const { assert(IsRefNil(type)); return ref_; }
ipValue430 InsPtr ip () const { assert(type >= V_FUNCTION); return ip_; }
anyValue431 void *any () const { return ref_; }
tivalValue432 TypeInfo *tival () const { assert(type == V_STRUCT_S); return ti_; }
433
ifvalValue434 template<typename T> T ifval() const {
435 if constexpr (is_floating_point<T>()) { assert(type == V_FLOAT); return (T)fval_; }
436 else { assert(type == V_INT); return (T)ival_; }
437 }
438
setivalValue439 void setival(intp i) { assert(type == V_INT); ival_ = i; }
setfvalValue440 void setfval(floatp f) { assert(type == V_FLOAT); fval_ = f; }
441
ValueValue442 inline Value() : TYPE_INIT(V_NIL) ref_(nullptr) {}
ValueValue443 inline Value(int i) : TYPE_INIT(V_INT) ival_(i) {}
ValueValue444 inline Value(uint i) : TYPE_INIT(V_INT) ival_((intp)i) {}
ValueValue445 inline Value(int64_t i) : TYPE_INIT(V_INT) ival_((intp)i) {}
ValueValue446 inline Value(uint64_t i) : TYPE_INIT(V_INT) ival_((intp)i) {}
ValueValue447 inline Value(int i, ValueType t) : TYPE_INIT(t) ival_(i) { (void)t; }
ValueValue448 inline Value(bool b) : TYPE_INIT(V_INT) ival_(b) {}
ValueValue449 inline Value(float f) : TYPE_INIT(V_FLOAT) fval_(f) {}
ValueValue450 inline Value(double f) : TYPE_INIT(V_FLOAT) fval_((floatp)f) {}
ValueValue451 inline Value(InsPtr i) : TYPE_INIT(V_FUNCTION) ip_(i) {}
452
ValueValue453 inline Value(LString *s) : TYPE_INIT(V_STRING) sval_(s) {}
ValueValue454 inline Value(LVector *v) : TYPE_INIT(V_VECTOR) vval_(v) {}
ValueValue455 inline Value(LObject *s) : TYPE_INIT(V_CLASS) oval_(s) {}
ValueValue456 inline Value(LCoRoutine *c) : TYPE_INIT(V_COROUTINE) cval_(c) {}
ValueValue457 inline Value(LResource *r) : TYPE_INIT(V_RESOURCE) xval_(r) {}
ValueValue458 inline Value(RefObj *r) : TYPE_INIT(V_NIL) ref_(r) { assert(false); }
459
ValueValue460 inline Value(TypeInfo *ti) : TYPE_INIT(V_STRUCT_S) ti_(ti) {}
461
TrueValue462 inline bool True() const { return ival_ != 0; }
463
LTINCRTValue464 inline Value <INCRT() {
465 assert(IsRef(type) && ref_);
466 ref_->Inc();
467 return *this;
468 }
LTINCRTNILValue469 inline Value <INCRTNIL() {
470 // Can't assert IsRefNil here, since scalar 0 are valid NIL values due to e.g. and/or.
471 if (ref_) LTINCRT();
472 return *this;
473 }
LTINCTYPEValue474 inline Value <INCTYPE(ValueType t) {
475 return IsRefNil(t) ? LTINCRTNIL() : *this;
476 }
477
LTDECRTValue478 inline void LTDECRT(VM &vm) const { // we already know its a ref type
479 assert(IsRef(type) && ref_);
480 ref_->Dec(vm);
481 }
LTDECRTNILValue482 inline void LTDECRTNIL(VM &vm) const {
483 // Can't assert IsRefNil here, since scalar 0 are valid NIL values due to e.g. and/or.
484 if (ref_) LTDECRT(vm);
485 }
LTDECTYPEValue486 inline void LTDECTYPE(VM &vm, ValueType t) const {
487 if (IsRefNil(t)) LTDECRTNIL(vm);
488 }
489
490 void ToString(VM &vm, ostringstream &ss, const TypeInfo &ti, PrintPrefs &pp) const;
491 void ToStringBase(VM &vm, ostringstream &ss, ValueType t, PrintPrefs &pp) const;
492
493 bool Equal(VM &vm, ValueType vtype, const Value &o, ValueType otype, bool structural) const;
494 intp Hash(VM &vm, ValueType vtype);
495 Value Copy(VM &vm); // Shallow.
496 };
497
498 template<typename T> inline T *AllocSubBuf(VM &vm, size_t size, type_elem_t tti);
499 template<typename T> inline void DeallocSubBuf(VM &vm, T *v, size_t size);
500
// A heap-allocated class instance. Field slots are allocated inline, directly
// after this header (see Elems()); the field count comes from the type info.
struct LObject : RefObj {
    LObject(type_elem_t _tti) : RefObj(_tti) {}

    // FIXME: reduce the use of these.
    intp Len(VM &vm) const { return ti(vm).len; }

    Value *Elems() const { return (Value *)(this + 1); }

    // This may only be called from a context where i < len has already been ensured/asserted.
    Value &AtS(intp i) const {
        return Elems()[i];
    }

    void DeleteSelf(VM &vm);

    // This may only be called from a context where i < len has already been ensured/asserted.
    const TypeInfo &ElemTypeS(VM &vm, intp i) const;
    const TypeInfo &ElemTypeSP(VM &vm, intp i) const;

    void ToString(VM &vm, ostringstream &ss, PrintPrefs &pp);

    // Structural, element-wise equality.
    bool Equal(VM &vm, const LObject &o) {
        // RefObj::Equal has already guaranteed the typeoff's are the same.
        auto len = Len(vm);
        assert(len == o.Len(vm));
        for (intp i = 0; i < len; i++) {
            auto et = ElemTypeS(vm, i).t;
            if (!AtS(i).Equal(vm, et, o.AtS(i), et, true))
                return false;
        }
        return true;
    }

    // XOR of the per-field hashes.
    intp Hash(VM &vm) {
        intp hash = 0;
        for (int i = 0; i < Len(vm); i++) hash ^= AtS(i).Hash(vm, ElemTypeS(vm, i).t);
        return hash;
    }

    // Bulk-copy len field values in; optionally increment refcounts of ref fields.
    void Init(VM &vm, Value *from, intp len, bool inc) {
        assert(len && len == Len(vm));
        t_memcpy(Elems(), from, len);
        if (inc) for (intp i = 0; i < len; i++) {
            AtS(i).LTINCTYPE(ElemTypeS(vm, i).t);
        }
    }
};
548
// A heap-allocated growable vector. Each logical element occupies `width`
// Value slots (width > 1 presumably for vectors of inline structs — see AtSt).
struct LVector : RefObj {
    intp len;    // has to match the Value integer type, since we allow the length to be obtained
    intp maxl;   // Allocated capacity, in elements.
    intp width;  // Value slots per element. TODO: would be great to not have to store this.

  private:
    Value *v;  // use At()

  public:
    LVector(VM &vm, intp _initial, intp _max, type_elem_t _tti);

    ~LVector() { assert(0); }   // destructed by DECREF

    void DeallocBuf(VM &vm) {
        if (v) DeallocSubBuf(vm, v, maxl * width);
    }

    // Decrement the refcount of slot i if et is a ref/nil type.
    void DecSlot(VM &vm, intp i, ValueType et) const {
        AtSlot(i).LTDECTYPE(vm, et);
    }

    void DeleteSelf(VM &vm);

    const TypeInfo &ElemType(VM &vm) const;

    void Resize(VM &vm, intp newmax);

    // Append a single value (width-1 vectors only), growing geometrically.
    void Push(VM &vm, const Value &val) {
        assert(width == 1);
        if (len == maxl) Resize(vm, maxl ? maxl * 2 : 4);
        v[len++] = val;
    }

    // Append one element of `width` slots.
    void PushVW(VM &vm, const Value *vals) {
        if (len == maxl) Resize(vm, maxl ? maxl * 2 : 4);
        tsnz_memcpy(v + len * width, vals, width);
        len++;
    }

    Value Pop() {
        assert(width == 1);
        return v[--len];
    }

    void PopVW(Value *dest) {
        len--;
        tsnz_memcpy(dest, v + len * width, width);
    }

    Value &Top() const {
        assert(width == 1);
        return v[len - 1];
    }

    void TopVW(Value *dest) {
        tsnz_memcpy(dest, v + (len - 1) * width, width);
    }

    // Insert one element at index i, shifting subsequent elements up.
    void Insert(VM &vm, const Value *vals, intp i) {
        assert(i >= 0 && i <= len);  // note: insertion right at the end is legal, hence <=
        if (len + 1 > maxl) Resize(vm, max(len + 1, maxl ? maxl * 2 : 4));
        t_memmove(v + (i + 1) * width, v + i * width, (len - i) * width);
        len++;
        tsnz_memcpy(v + i * width, vals, width);
    }

    void Remove(VM &vm, intp i, intp n, intp decfrom, bool stack_ret);

    Value *Elems() { return v; }

    // Single-value element access; caller must have bounds-checked.
    Value &At(intp i) const {
        assert(i < len && width == 1);
        return v[i];
    }

    // Pointer to the first slot of element i (any width).
    Value *AtSt(intp i) const {
        assert(i < len);
        return v + i * width;
    }

    // Raw slot access (index in slots, not elements).
    Value &AtSlot(intp i) const {
        assert(i < len * width);
        return v[i];
    }

    void AtVW(VM &vm, intp i) const;
    void AtVWSub(VM &vm, intp i, int w, int off) const;

    void Append(VM &vm, LVector *from, intp start, intp amount);

    void ToString(VM &vm, ostringstream &ss, PrintPrefs &pp);

    // Structural, element-wise equality (width-1 only).
    bool Equal(VM &vm, const LVector &o) {
        // RefObj::Equal has already guaranteed the typeoff's are the same.
        assert(width == 1);
        if (len != o.len) return false;
        auto et = ElemType(vm).t;
        for (intp i = 0; i < len; i++) {
            if (!At(i).Equal(vm, et, o.At(i), et, true))
                return false;
        }
        return true;
    }

    // XOR of the per-element hashes (width-1 only).
    intp Hash(VM &vm) {
        intp hash = 0;
        assert(width == 1);
        auto et = ElemType(vm).t;
        for (int i = 0; i < len; i++) hash ^= At(i).Hash(vm, et);
        return hash;
    }

    // Bulk-initialize from `from`; optionally increment refcounts of ref elems.
    void Init(VM &vm, Value *from, bool inc) {
        assert(len);
        t_memcpy(v, from, len * width);
        auto et = ElemType(vm).t;
        if (inc && IsRefNil(et)) {
            for (intp i = 0; i < len; i++) {
                At(i).LTINCRTNIL();
            }
        }
    }
};
672
// Per-variable value logging across frames. NOTE(review): exact replay
// semantics inferred from the API shape (LogGet takes a default and an index);
// confirm against the implementation.
struct VMLog {
    struct LogVar {
        vector<Value> values;  // Recorded values for this variable.
        size_t read;           // Read cursor into `values`.
        const TypeInfo *type;
    };
    vector<LogVar> logvars;

    VM &vm;
    VMLog(VM &_vm);

    void LogInit(const uchar *bcf);
    void LogPurge();
    void LogFrame();
    Value LogGet(Value def, int idx);
    void LogWrite(Value newval, int idx);
    void LogCleanup();
};
691
// Bookkeeping for one function invocation on the VM stack.
struct StackFrame {
    InsPtr retip;         // Where to resume in the caller.
    const int *funstart;  // Start of the callee's bytecode.
    int spstart;          // Stack pointer value at frame start.
};
697
698 struct NativeFun;
699 struct NativeRegistry;
700
// This contains all data shared between threads.
struct TupleSpace {
    struct TupleType {
        // We have an independent list of tuples and synchronization per type, for minimum
        // contention.
        list<Value *> tuples;
        mutex mtx;
        condition_variable condition;
    };
    vector<TupleType> tupletypes;  // Indexed by struct/tuple type.

    // Cleared to signal worker shutdown.
    atomic<bool> alive = true;

    TupleSpace(size_t numstructs) : tupletypes(numstructs) {}

    ~TupleSpace() {
        // Any tuples never read by a worker are freed here.
        for (auto &tt : tupletypes) for (auto p : tt.tuples) delete[] p;
    }
};
720
721 enum class TraceMode { OFF, ON, TAIL };
722
723 struct VMArgs {
724 NativeRegistry 𝔫
725 string_view programname;
726 string bytecode_buffer;
727 const void *entry_point = nullptr;
728 const void *static_bytecode = nullptr;
729 size_t static_size = 0;
730 vector<string> program_args;
731 const lobster::block_t *native_vtables = nullptr;
732 TraceMode trace = TraceMode::OFF;
733 };
734
735 struct VM : VMArgs {
736 SlabAlloc pool;
737
738 Value *stack = nullptr;
739 int stacksize = 0;
740 int maxstacksize;
741 int sp = -1;
742
743 Value retvalstemp[MAX_RETURN_VALUES];
744
745 #ifdef VM_COMPILED_CODE_MODE
746 block_t next_call_target = 0;
747 #else
748 const int *ip = nullptr;
749 #endif
750
751 vector<StackFrame> stackframes;
752
753 LCoRoutine *curcoroutine = nullptr;
754
755 Value *vars = nullptr;
756
757 size_t codelen = 0;
758 const int *codestart = nullptr;
759 vector<int> codebigendian;
760 vector<type_elem_t> typetablebigendian;
761 uint64_t *byteprofilecounts = nullptr;
762
763 const bytecode::BytecodeFile *bcf = nullptr;
764
765 PrintPrefs programprintprefs { 10, 100000, false, -1 };
766 const type_elem_t *typetable = nullptr;
767 string evalret;
768
769 int currentline = -1;
770 int maxsp = -1;
771
772 PrintPrefs debugpp { 2, 50, true, -1 };
773
774 VMLog vml { *this };
775
776 ostringstream ss_reuse;
777
778 vector<ostringstream> trace_output;
779 size_t trace_ring_idx = 0;
780
781 vector<RefObj *> delete_delay;
782
783 vector<LString *> constant_strings;
784
785 vector<InsPtr> vtables;
786
787 int64_t vm_count_ins = 0;
788 int64_t vm_count_fcalls = 0;
789 int64_t vm_count_bcalls = 0;
790 int64_t vm_count_decref = 0;
791
792 //#define VM_ERROR_RET_EXPERIMENT
793 #if defined(VM_ERROR_RET_EXPERIMENT) && !defined(VM_COMPILED_CODE_MODE)
794 #define VM_INS_RET bool
795 #define VM_RET return false
796 #define VM_TERMINATE return true
797 #else
798 #define VM_INS_RET void
799 #define VM_RET
800 #define VM_TERMINATE
801 #endif
802
803 typedef VM_INS_RET (VM::* f_ins_pointer)();
804 f_ins_pointer f_ins_pointers[IL_MAX_OPS];
805
806 const void *compiled_code_ip = nullptr;
807
808 bool is_worker = false;
809 vector<thread> workers;
810 TupleSpace *tuple_space = nullptr;
811
812 VM(VMArgs &&args);
813 ~VM();
814
815 void OneMoreFrame();
816
GetTypeInfoVM817 const TypeInfo &GetTypeInfo(type_elem_t offset) {
818 return *(TypeInfo *)(typetable + offset);
819 }
820 const TypeInfo &GetVarTypeInfo(int varidx);
821
SetMaxStackVM822 void SetMaxStack(int ms) { maxstacksize = ms; }
GetProgramNameVM823 string_view GetProgramName() { return programname; }
824
825 type_elem_t GetIntVectorType(int which);
826 type_elem_t GetFloatVectorType(int which);
827
828 void DumpVal(RefObj *ro, const char *prefix);
829 void DumpFileLine(const int *fip, ostringstream &ss);
830 void DumpLeaks();
831
832 ostringstream &TraceStream();
833
834 void OnAlloc(RefObj *ro);
835 LVector *NewVec(intp initial, intp max, type_elem_t tti);
836 LObject *NewObject(intp max, type_elem_t tti);
837 LCoRoutine *NewCoRoutine(InsPtr rip, const int *vip, LCoRoutine *p, type_elem_t tti);
838 LResource *NewResource(void *v, const ResourceType *t);
839 LString *NewString(size_t l);
840 LString *NewString(string_view s);
841 LString *NewString(string_view s1, string_view s2);
842 LString *ResizeString(LString *s, intp size, int c, bool back);
843
844 Value Error(string err, const RefObj *a = nullptr, const RefObj *b = nullptr);
BuiltinErrorVM845 Value BuiltinError(string err) { return Error(err); }
846 void VMAssert(const char *what);
847 void VMAssert(const char *what, const RefObj *a, const RefObj *b);
848
849 int DumpVar(ostringstream &ss, const Value &x, size_t idx, bool dumpglobals);
850
851 void FinalStackVarsCleanup();
852
853 void StartWorkers(size_t numthreads);
854 void TerminateWorkers();
855 void WorkerWrite(RefObj *ref);
856 LObject *WorkerRead(type_elem_t tti);
857
858 #ifdef VM_COMPILED_CODE_MODE
859 #define VM_COMMA ,
860 #define VM_OP_ARGS const int *ip
861 #define VM_OP_ARGS_CALL block_t fcont
862 #define VM_IP_PASS_THRU ip
863 #define VM_FC_PASS_THRU fcont
864 #define VM_JMP_RET bool
865 #else
866 #define VM_COMMA
867 #define VM_OP_ARGS
868 #define VM_OP_ARGS_CALL
869 #define VM_IP_PASS_THRU
870 #define VM_FC_PASS_THRU
871 #define VM_JMP_RET VM_INS_RET
872 #endif
873
874 void JumpTo(InsPtr j);
875 InsPtr GetIP();
876 template<int is_error> int VarCleanup(ostringstream *error, int towhere);
877 void StartStackFrame(InsPtr retip);
878 void FunIntroPre(InsPtr fun);
879 void FunIntro(VM_OP_ARGS);
880 void FunOut(int towhere, int nrv);
881
882 void CoVarCleanup(LCoRoutine *co);
883 void CoNonRec(const int *varip);
884 void CoNew(VM_OP_ARGS VM_COMMA VM_OP_ARGS_CALL);
885 void CoSuspend(InsPtr retip);
886 void CoClean();
887 void CoYield(VM_OP_ARGS_CALL);
888 void CoResume(LCoRoutine *co);
889
890 void EndEval(const Value &ret, const TypeInfo &ti);
891
InstructionPointerInitVM892 void InstructionPointerInit() {
893 #ifdef VM_COMPILED_CODE_MODE
894 #define F(N, A) f_ins_pointers[IL_##N] = nullptr;
895 #else
896 #define F(N, A) f_ins_pointers[IL_##N] = &VM::F_##N;
897 #endif
898 ILNAMES
899 #undef F
900 }
901
902 #define VM_OP_ARGS0
903 #define VM_OP_ARGS1 int _a
904 #define VM_OP_ARGS2 int _a, int _b
905 #define VM_OP_ARGS3 int _a, int _b, int _c
906 #define VM_OP_ARGS9 VM_OP_ARGS // ILUNKNOWNARITY
907 #define VM_OP_ARGSN(N) VM_OP_ARGS##N
908 #define VM_OP_DEFS0
909 #define VM_OP_DEFS1 int _a = *ip++;
910 #define VM_OP_DEFS2 int _a = *ip++; int _b = *ip++;
911 #define VM_OP_DEFS3 int _a = *ip++; int _b = *ip++; int _c = *ip++;
912 #define VM_OP_DEFS9 // ILUNKNOWNARITY
913 #define VM_OP_DEFSN(N) VM_OP_DEFS##N (void)ip;
914 #define VM_OP_PASS0
915 #define VM_OP_PASS1 _a
916 #define VM_OP_PASS2 _a, _b
917 #define VM_OP_PASS3 _a, _b, _c
918 #define VM_OP_PASS9 VM_IP_PASS_THRU // ILUNKNOWNARITY
919 #define VM_OP_PASSN(N) VM_OP_PASS##N
920 #define VM_COMMA_0
921 #define VM_COMMA_1 ,
922 #define VM_COMMA_2 ,
923 #define VM_COMMA_3 ,
924 #define VM_COMMA_9 ,
925 #define VM_COMMA_IF(N) VM_COMMA_##N
926 #define VM_CCOMMA_0
927 #define VM_CCOMMA_1 VM_COMMA
928 #define VM_CCOMMA_2 VM_COMMA
929 #define VM_CCOMMA_9 VM_COMMA
930 #define VM_CCOMMA_IF(N) VM_CCOMMA_##N
931
932 #define F(N, A) VM_INS_RET U_##N(VM_OP_ARGSN(A)); \
933 VM_INS_RET F_##N(VM_OP_ARGS) { \
934 VM_OP_DEFSN(A); \
935 return U_##N(VM_OP_PASSN(A)); \
936 }
937 LVALOPNAMES
938 #undef F
939 #define F(N, A) VM_INS_RET U_##N(VM_OP_ARGSN(A)); \
940 VM_INS_RET F_##N(VM_OP_ARGS) { \
941 VM_OP_DEFSN(A); \
942 return U_##N(VM_OP_PASSN(A)); \
943 }
944 ILBASENAMES
945 #undef F
946 #define F(N, A) VM_INS_RET U_##N(VM_OP_ARGSN(A) VM_CCOMMA_IF(A) VM_OP_ARGS_CALL); \
947 VM_INS_RET F_##N(VM_OP_ARGS VM_COMMA VM_OP_ARGS_CALL) { \
948 VM_OP_DEFSN(A); \
949 return U_##N(VM_OP_PASSN(A) VM_CCOMMA_IF(A) VM_FC_PASS_THRU); \
950 }
951 ILCALLNAMES
952 #undef F
953 #define F(N, A) VM_JMP_RET U_##N(); VM_JMP_RET F_##N() { return U_##N(); }
954 ILJUMPNAMES
955 #undef F
956
957 #pragma push_macro("LVAL")
958 #undef LVAL
959 #define LVAL(N, V) void LV_##N(Value &a VM_COMMA_IF(V) VM_OP_ARGSN(V));
960 LVALOPNAMES
961 #undef LVAL
962 #pragma pop_macro("LVAL")
963
964 void EvalProgram();
965 void EvalProgramInner();
966
967 VM_JMP_RET ForLoop(intp len);
968
969 Value &GetFieldLVal(intp i);
970 Value &GetFieldILVal(intp i);
971 Value &GetLocLVal(int i);
972 Value &GetVecLVal(intp i);
973
974 void PushDerefIdxVector(intp i);
975 void PushDerefIdxVectorSub(intp i, int width, int offset);
976 void PushDerefIdxStruct(intp i, int l);
977 void PushDerefIdxString(intp i);
978 void LvalueIdxVector(int lvalop, intp i);
979 void LvalueIdxStruct(int lvalop, intp i);
980 void LvalueField(int lvalop, intp i);
981 void LvalueOp(int op, Value &a);
982
983 string ProperTypeName(const TypeInfo &ti);
984
Div0VM985 void Div0() { Error("division by zero"); }
986 void IDXErr(intp i, intp n, const RefObj *v);
987 void BCallProf();
988 void BCallRetCheck(const NativeFun *nf);
989 intp GrabIndex(int len);
990
991 #define VM_PUSH(v) (stack[++sp] = (v))
992 #define VM_TOP() (stack[sp])
993 #define VM_TOPM(n) (stack[sp - (n)])
994 #define VM_POP() (stack[sp--])
995 #define VM_POPN(n) (sp -= (n))
996 #define VM_PUSHN(n) (sp += (n))
997 #define VM_TOPPTR() (stack + sp + 1)
998
PushVM999 void Push(const Value &v) { VM_PUSH(v); }
PopVM1000 Value Pop() { return VM_POP(); }
TopVM1001 Value Top() { return VM_TOP(); }
TopPtrVM1002 Value *TopPtr() { return VM_TOPPTR(); }
PushNVM1003 void PushN(int n) { VM_PUSHN(n); }
PopNVM1004 void PopN(int n) { VM_POPN(n); }
PopVecPtrVM1005 pair<Value *, int> PopVecPtr() {
1006 auto width = VM_POP().intval();
1007 VM_POPN(width);
1008 return { VM_TOPPTR(), width };
1009 }
1010 template<typename T, int N> void PushVec(const vec<T, N> &v, int truncate = 4) {
1011 auto l = min(N, truncate);
1012 for (int i = 0; i < l; i++) VM_PUSH(v[i]);
1013 }
    // Pops a width-prefixed vector from the stack into a T (e.g. intp3).
    // The top of the stack holds the element count; if it exceeds
    // T::NUM_ELEMENTS the surplus (highest-index, most recently pushed)
    // elements are discarded, and missing elements are filled with `def`.
    template<typename T> T PopVec(typename T::CTYPE def = 0) {
        T v;
        auto l = VM_POP().intval();
        if (l > T::NUM_ELEMENTS) VM_POPN(l - T::NUM_ELEMENTS);
        // Fill from the last element down: the highest remaining index was
        // pushed last, so it is popped first.
        for (int i = T::NUM_ELEMENTS - 1; i >= 0; i--) {
            v[i] = i < l ? VM_POP().ifval<typename T::CTYPE>() : def;
        }
        return v;
    }
    // Pushes the raw bytes of t as a new string value; paired with
    // PopAnyFromString below to round-trip fixed-size PODs through strings.
    template<typename T> void PushAnyAsString(const T &t) {
        Push(NewString(string_view((char *)&t, sizeof(T))));
    }
1026
    // Inverse of PushAnyAsString: pops a string value and copies its bytes
    // back into t. The string's length must equal sizeof(T) exactly.
    template<typename T> void PopAnyFromString(T &t) {
        auto s = Pop();
        assert(s.type == V_STRING);
        assert(s.sval()->len == sizeof(T));
        t = *(T *)s.sval()->strv().data();
        s.LTDECRT(*this);  // Release the popped string reference.
    }
1034
    string_view StructName(const TypeInfo &ti);
    string_view ReverseLookupType(uint v);
    // Sets the current tracing mode.
    void Trace(TraceMode m) { trace = m; }
    // Seconds elapsed since program start.
    double Time() { return SecondsSinceStart(); }
1039
    // Renders a single value to a new string value, reusing the shared
    // stringstream to avoid reallocating its buffer on every call.
    Value ToString(const Value &a, const TypeInfo &ti) {
        ss_reuse.str(string());  // Reset contents.
        ss_reuse.clear();        // Reset error/eof flags.
        a.ToString(*this, ss_reuse, ti, programprintprefs);
        return NewString(ss_reuse.str());
    }
    // Renders an inline struct (elems laid out contiguously) to a new string
    // value, reusing the shared stringstream as in ToString above.
    Value StructToString(const Value *elems, const TypeInfo &ti) {
        ss_reuse.str(string());
        ss_reuse.clear();
        StructToString(ss_reuse, programprintprefs, ti, elems);
        return NewString(ss_reuse.str());
    }
    void StructToString(ostringstream &ss, PrintPrefs &pp, const TypeInfo &ti, const Value *elems);

    // Enum name/value lookups against the bytecode's enum tables.
    string_view EnumName(intp val, int enumidx);
    string_view EnumName(int enumidx);
    optional<int64_t> LookupEnum(string_view name, int enumidx);
1057 };
1058
// Combines two 32-bit ints into a single 64-bit value: a supplies the low
// word (zero-extended), b the high word.
inline int64_t Int64FromInts(int a, int b) {
    auto lo = (int64_t)(uint32_t)a;
    auto hi = ((int64_t)b) << 32;
    return hi | lo;
}
1064
// Out-of-line because it needs the full VM definition above: resolves this
// allocation's stored type index to its TypeInfo.
inline const TypeInfo &DynAlloc::ti(VM &vm) const { return vm.GetTypeInfo(tti); }
1066
AllocSubBuf(VM & vm,size_t size,type_elem_t tti)1067 template<typename T> inline T *AllocSubBuf(VM &vm, size_t size, type_elem_t tti) {
1068 auto header_sz = max(alignof(T), sizeof(DynAlloc));
1069 auto mem = (uchar *)vm.pool.alloc(size * sizeof(T) + header_sz);
1070 ((DynAlloc *)mem)->tti = tti;
1071 mem += header_sz;
1072 return (T *)mem;
1073 }
1074
DeallocSubBuf(VM & vm,T * v,size_t size)1075 template<typename T> inline void DeallocSubBuf(VM &vm, T *v, size_t size) {
1076 auto header_sz = max(alignof(T), sizeof(DynAlloc));
1077 auto mem = ((uchar *)v) - header_sz;
1078 vm.pool.dealloc(mem, size * sizeof(T) + header_sz);
1079 }
1080
// Copies `size` bytes into string s at offset i, measured from the front, or
// from the back when `back` is set. If the string is too short it is grown to
// double the required length (amortizing repeated appends). Returns the
// possibly-reallocated string.
template<bool back> LString *WriteMem(VM &vm, LString *s, intp i, const void *data, size_t size) {
    auto minsize = i + (intp)size;
    if (s->len < minsize) s = vm.ResizeString(s, minsize * 2, 0, back);
    memcpy((void *)(s->data() + (back ? s->len - i - size : i)), data, size);
    return s;
}
1087
WriteValLE(VM & vm,LString * s,intp i,T val)1088 template<typename T, bool back> LString *WriteValLE(VM &vm, LString *s, intp i, T val) {
1089 T t = flatbuffers::EndianScalar(val);
1090 return WriteMem<back>(vm, s, i, &t, sizeof(T));
1091 }
1092
ReadValLE(const LString * s,intp i)1093 template<typename T, bool back> T ReadValLE(const LString *s, intp i) {
1094 T val;
1095 memcpy(&val, (void *)(s->data() + (back ? s->len - i - sizeof(T) : i)), sizeof(T));
1096 return flatbuffers::EndianScalar(val);
1097 }
1098
1099
1100 // FIXME: turn check for len into an assert and make caller guarantee lengths match.
1101 template<int N> inline vec<floatp, N> ValueToF(const Value *v, intp width, floatp def = 0) {
1102 vec<floatp, N> t;
1103 for (int i = 0; i < N; i++) t[i] = width > i ? (v + i)->fval() : def;
1104 return t;
1105 }
1106 template<int N> inline vec<intp, N> ValueToI(const Value *v, intp width, intp def = 0) {
1107 vec<intp, N> t;
1108 for (int i = 0; i < N; i++) t[i] = width > i ? (v + i)->ival() : def;
1109 return t;
1110 }
1111 template<int N> inline vec<float, N> ValueToFLT(const Value *v, intp width, float def = 0) {
1112 vec<float, N> t;
1113 for (int i = 0; i < N; i++) t[i] = width > i ? (v + i)->fltval() : def;
1114 return t;
1115 }
1116 template<int N> inline vec<int, N> ValueToINT(const Value *v, intp width, int def = 0) {
1117 vec<int, N> t;
1118 for (int i = 0; i < N; i++) t[i] = width > i ? (v + i)->intval() : def;
1119 return t;
1120 }
1121
ToValue(Value * dest,intp width,const vec<T,N> & v)1122 template <typename T, int N> inline void ToValue(Value *dest, intp width, const vec<T, N> &v) {
1123 for (intp i = 0; i < width; i++) dest[i] = i < N ? v[i] : 0;
1124 }
1125
1126 inline intp RangeCheck(VM &vm, const Value &idx, intp range, intp bias = 0) {
1127 auto i = idx.ival();
1128 if (i < bias || i >= bias + range)
1129 vm.BuiltinError(cat("index out of range [", bias, "..", bias + range, "): ", i));
1130 return i;
1131 }
1132
// Extracts the underlying payload pointer from a resource Value, cast to T.
// A false (nil/zero) value yields nullptr; a resource of the wrong type
// raises a builtin error.
template<typename T> inline T GetResourceDec(VM &vm, const Value &val, const ResourceType *type) {
    if (!val.True())
        return nullptr;
    auto x = val.xval();
    if (x->type != type)
        vm.BuiltinError(string_view("needed resource type: ") + type->name + ", got: " +
                        x->type->name);
    return (T)x->val;
}
1142
ValueToVectorOfStrings(Value & v)1143 inline vector<string> ValueToVectorOfStrings(Value &v) {
1144 vector<string> r;
1145 for (int i = 0; i < v.vval()->len; i++) r.push_back(string(v.vval()->At(i).sval()->strv()));
1146 return r;
1147 }
1148
ToValueOfVectorOfStrings(VM & vm,const vector<string> & in)1149 inline Value ToValueOfVectorOfStrings(VM &vm, const vector<string> &in) {
1150 auto v = vm.NewVec(0, (intp)in.size(), TYPE_ELEM_VECTOR_OF_STRING);
1151 for (auto &a : in) v->Push(vm, vm.NewString(a));
1152 return Value(v);
1153 }
1154
ToValueOfVectorOfStringsEmpty(VM & vm,const int2 & size,char init)1155 inline Value ToValueOfVectorOfStringsEmpty(VM &vm, const int2 &size, char init) {
1156 auto v = vm.NewVec(0, size.y, TYPE_ELEM_VECTOR_OF_STRING);
1157 for (int i = 0; i < size.y; i++) {
1158 auto s = vm.NewString(size.x);
1159 memset((char *)s->data(), init, size.x);
1160 v->Push(vm, s);
1161 }
1162 return Value(v);
1163 }
1164
// Appends s to ss surrounded by quotes, escaping special characters.
void EscapeAndQuote(string_view s, ostringstream &ss);
1166
// A coroutine object. While running, its values/frames live on the VM stack
// (tracked by stackstart/stackframestart); while suspended, they are copied
// into the stackcopy/stackframescopy buffers below.
struct LCoRoutine : RefObj {
    bool active = true;  // Goes to false when it has hit the end of the coroutine instead of a yield.

    int stackstart;    // When currently running, otherwise -1
    Value *stackcopy = nullptr;   // Saved stack values while suspended.
    int stackcopylen = 0;
    int stackcopymax = 0;         // Allocated capacity of stackcopy.

    int stackframestart;  // When currently running, otherwise -1
    StackFrame *stackframescopy = nullptr;  // Saved frames while suspended.
    int stackframecopylen = 0;
    int stackframecopymax = 0;
    int top_at_suspend = -1;  // VM stack top at the moment of the last suspend.

    InsPtr returnip;     // Resume/return address; swapped with the VM's ip on each suspend/resume.
    const int *varip;    // Var table: varip[0] is the count, varip[1..] are var indices.
    LCoRoutine *parent;  // Who resumed us; set while running, null while suspended.

    LCoRoutine(int _ss, int _sfs, InsPtr _rip, const int *_vip, LCoRoutine *_p, type_elem_t cti)
        : RefObj(cti), stackstart(_ss), stackframestart(_sfs), returnip(_rip), varip(_vip),
          parent(_p) {}

    // Last yielded value (top of the saved stack); only valid while suspended.
    Value &Current(VM &vm) {
        if (stackstart >= 0) vm.BuiltinError("cannot get value of active coroutine");
        return stackcopy[stackcopylen - 1].LTINCTYPE(vm.GetTypeInfo(ti(vm).yieldtype).t);
    }

    // Ensures stackcopy can hold newlen values. NOTE: does not preserve
    // existing contents on growth (the old buffer is simply freed); callers
    // always rewrite the whole buffer afterwards.
    void Resize(VM &vm, int newlen) {
        if (newlen > stackcopymax) {
            if (stackcopy) DeallocSubBuf(vm, stackcopy, stackcopymax);
            stackcopy = AllocSubBuf<Value>(vm, stackcopymax = newlen, TYPE_ELEM_VALUEBUF);
        }
        stackcopylen = newlen;
    }

    // As Resize, but for the saved StackFrame buffer.
    void ResizeFrames(VM &vm, int newlen) {
        if (newlen > stackframecopymax) {
            if (stackframescopy) DeallocSubBuf(vm, stackframescopy, stackframecopymax);
            stackframescopy = AllocSubBuf<StackFrame>(vm, stackframecopymax = newlen,
                                                      TYPE_ELEM_STACKFRAMEBUF);
        }
        stackframecopylen = newlen;
    }

    // Copies this coroutine's portion of the VM stack and stack frames into
    // the local buffers, hands control back to the parent, and marks the
    // coroutine suspended. Returns the old stack start so the caller can
    // restore the stack top.
    int Suspend(VM &vm, int top, Value *stack, vector<StackFrame> &stackframes, InsPtr &rip,
                LCoRoutine *&curco) {
        assert(stackstart >= 0);
        swap(rip, returnip);  // Save the VM's ip, continue at our return ip.
        assert(curco == this);
        curco = parent;
        parent = nullptr;
        ResizeFrames(vm, (int)stackframes.size() - stackframestart);
        t_memcpy(stackframescopy, stackframes.data() + stackframestart, stackframecopylen);
        stackframes.erase(stackframes.begin() + stackframestart, stackframes.end());
        stackframestart = -1;
        top_at_suspend = top;
        Resize(vm, top - stackstart);
        t_memcpy(stackcopy, stack + stackstart, stackcopylen);
        int ss = stackstart;
        stackstart = -1;
        return ss;
    }

    // If the stack top moved since we were suspended, fix up the saved
    // frames' stack offsets by the difference.
    void AdjustStackFrames(int top) {
        int topdelta = (top + stackcopylen) - top_at_suspend;
        if (topdelta) {
            for (int i = 0; i < stackframecopylen; i++) {
                stackframescopy[i].spstart += topdelta;
            }
        }
    }

    // Restores the saved values/frames back onto the VM stack at `top` and
    // marks the coroutine running under parent p. Returns the number of
    // values restored.
    int Resume(int top, Value *stack, vector<StackFrame> &stackframes, InsPtr &rip, LCoRoutine *p) {
        assert(stackstart < 0);
        swap(rip, returnip);
        assert(!parent);
        parent = p;
        stackframestart = (int)stackframes.size();
        AdjustStackFrames(top);
        stackframes.insert(stackframes.end(), stackframescopy, stackframescopy + stackframecopylen);
        stackstart = top;
        // FIXME: assume that it fits, which is not guaranteed with recursive coros
        t_memcpy(stack + top, stackcopy, stackcopylen);
        return stackcopylen;
    }

    // Saves the parent's copies of our vars into stackcopy.
    void BackupParentVars(VM &vm, Value *vars) {
        // stored here while coro is active
        Resize(vm, *varip);
        for (int i = 1; i <= *varip; i++) {
            auto &var = vars[varip[i]];
            // we don't INC, since parent var is still on the stack and will hold ref
            stackcopy[i - 1] = var;
        }
    }

    // Access the savedvaridx-th saved variable; only valid while suspended.
    Value &AccessVar(int savedvaridx) {
        assert(stackstart < 0);
        // Variables are always saved on top of the stack before the stackcopy gets made, so they
        // are last, followed by the retval (thus -1).
        return stackcopy[stackcopylen - *varip + savedvaridx - 1];
    }

    // Finds the saved variable with var index ididx; errors if the coroutine
    // is running or the variable is not part of its saved state.
    Value &GetVar(VM &vm, int ididx) {
        if (stackstart >= 0)
            vm.BuiltinError("cannot access locals of running coroutine");
        // FIXME: we can probably make it work without this search, but for now no big deal
        for (int i = 1; i <= *varip; i++) {
            if (varip[i] == ididx) {
                return AccessVar(i - 1);
            }
        }
        // This one should be really rare, since parser already only allows lexically contained vars
        // for that function, could happen when accessing var that's not in the callchain of yields.
        vm.BuiltinError("local variable being accessed is not part of coroutine state");
        return *stackcopy;  // Unreachable; keeps the signature satisfied.
    }

    // Releases the saved return value and variables, frees both buffers, and
    // returns this object to the pool. Must only be called while suspended.
    void DeleteSelf(VM &vm) {
        assert(stackstart < 0);
        if (stackcopy) {
            auto curvaltype = vm.GetTypeInfo(ti(vm).yieldtype).t;
            auto &ts = stackcopy[--stackcopylen];
            ts.LTDECTYPE(vm, curvaltype);  // Release the last yielded value.
            if (active) {
                // Release saved vars in reverse save order.
                for (int i = *varip; i > 0; i--) {
                    auto &vti = vm.GetVarTypeInfo(varip[i]);
                    stackcopy[--stackcopylen].LTDECTYPE(vm, vti.t);
                }
                top_at_suspend -= *varip + 1;
                // This calls Resume() to get the rest back onto the stack, then unwinds it.
                vm.CoVarCleanup(this);
            } else {
                assert(!stackcopylen);
            }
            DeallocSubBuf(vm, stackcopy, stackcopymax);
        }
        if (stackframescopy) DeallocSubBuf(vm, stackframescopy, stackframecopymax);
        vm.pool.dealloc(this, sizeof(LCoRoutine));
    }

    // Declared (static) type of the i-th saved variable; with RTT enabled it
    // also sanity-checks the declared type against the runtime value.
    ValueType ElemType(VM &vm, int i) {
        assert(i < *varip);
        auto varidx = varip[i + 1];
        auto &vti = vm.GetVarTypeInfo(varidx);
        auto vt = vti.t;
        if (vt == V_NIL) vt = vm.GetTypeInfo(vti.subt).t;
        #if RTT_ENABLED
        auto &var = AccessVar(i);
        // FIXME: For testing.
        if(vt != var.type && var.type != V_NIL && !(vt == V_VECTOR && IsUDT(var.type))) {
            LOG_INFO("coro elem ", vti.Debug(vm), " != ", BaseTypeName(var.type));
            assert(false);
        }
        #endif
        return vt;
    }
};
1325
1326 } // namespace lobster
1327
1328 #endif // LOBSTER_VMDATA
1329