// Copyright 2014 Wouter van Oortmerssen. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "lobster/stdafx.h"

#include "lobster/disasm.h"

namespace lobster {

#ifndef NDEBUG
#define VM_PROFILER // tiny VM slowdown and memory usage when enabled
#endif

#ifdef VM_COMPILED_CODE_MODE
#ifdef _WIN32
#pragma warning (disable: 4458) // ip hides class member, which we rely on
#pragma warning (disable: 4100) // ip may not be touched
#endif
#endif

enum {
    // *8 bytes each
    INITSTACKSIZE = 4 * 1024,
    // *8 bytes each, modest on smallest handheld we support (iPhone 3GS has 256MB).
    DEFMAXSTACKSIZE = 128 * 1024,
    // *8 bytes each, max by which the stack could possibly grow in a single call.
    STACKMARGIN = 1 * 1024
};

#define MEASURE_INSTRUCTION_COMBINATIONS 0
#if MEASURE_INSTRUCTION_COMBINATIONS
map<pair<int, int>, size_t> instruction_combinations;
int last_instruction_opc = -1;
#endif

VM::VM(VMArgs &&vmargs) : VMArgs(std::move(vmargs)), maxstacksize(DEFMAXSTACKSIZE) {
    auto bcfb = (uchar *)(static_bytecode ? static_bytecode : bytecode_buffer.data());
    auto bcs = static_bytecode ? static_size : bytecode_buffer.size();
    flatbuffers::Verifier verifier(bcfb, bcs);
    auto ok = bytecode::VerifyBytecodeFileBuffer(verifier);
    if (!ok) THROW_OR_ABORT(string("bytecode file failed to verify"));
    bcf = bytecode::GetBytecodeFile(bcfb);
    if (bcf->bytecode_version() != LOBSTER_BYTECODE_FORMAT_VERSION)
        THROW_OR_ABORT(string("bytecode is from a different version of Lobster"));
    codelen = bcf->bytecode()->Length();
    if (FLATBUFFERS_LITTLEENDIAN) {
        // We can use the buffer directly.
        codestart = (const int *)bcf->bytecode()->Data();
        typetable = (const type_elem_t *)bcf->typetable()->Data();
    } else {
        for (uint i = 0; i < codelen; i++)
            codebigendian.push_back(bcf->bytecode()->Get(i));
        codestart = codebigendian.data();

        for (uint i = 0; i < bcf->typetable()->Length(); i++)
            typetablebigendian.push_back((type_elem_t)bcf->typetable()->Get(i));
        typetable = typetablebigendian.data();
    }
    #ifdef VM_COMPILED_CODE_MODE
        compiled_code_ip = entry_point;
    #else
        ip = codestart;
    #endif
    vars = new Value[bcf->specidents()->size()];
    stack = new Value[stacksize = INITSTACKSIZE];
    #ifdef VM_PROFILER
        byteprofilecounts = new uint64_t[codelen];
        memset(byteprofilecounts, 0, sizeof(uint64_t) * codelen);
    #endif
    vml.LogInit(bcfb);
    InstructionPointerInit();
    constant_strings.resize(bcf->stringtable()->size());
    #ifdef VM_COMPILED_CODE_MODE
        assert(native_vtables);
        for (size_t i = 0; i < bcf->vtables()->size(); i++) {
            vtables.push_back(InsPtr(native_vtables[i]));
        }
    #else
        assert(!native_vtables);
        for (auto bcs : *bcf->vtables()) {
            vtables.push_back(InsPtr(bcs));
        }
    #endif
}

VM::~VM() {
    TerminateWorkers();
    if (stack) delete[] stack;
    if (vars) delete[] vars;
    if (byteprofilecounts) delete[] byteprofilecounts;
}

void VM::OneMoreFrame() {
    // We just landed back into the VM after being suspended inside a gl_frame() call.
    // Emulate the return of gl_frame():
    VM_PUSH(Value(1));  // We're not terminating yet.
    #ifdef VM_COMPILED_CODE_MODE
        // Native code generators ensure that next_call_target is set before
        // a native function call, and that it is returned to the trampoline
        // after, so do the same thing here.
        compiled_code_ip = (const void *)next_call_target;
    #endif
    EvalProgram();  // Continue execution as if nothing happened.
}

const TypeInfo &VM::GetVarTypeInfo(int varidx) {
    return GetTypeInfo((type_elem_t)bcf->specidents()->Get(varidx)->typeidx());
}

type_elem_t VM::GetIntVectorType(int which) {
    auto i = bcf->default_int_vector_types()->Get(which);
    return type_elem_t(i < 0 ? -1 : i);
}
type_elem_t VM::GetFloatVectorType(int which) {
    auto i = bcf->default_float_vector_types()->Get(which);
    return type_elem_t(i < 0 ? -1 : i);
}

static bool _LeakSorter(void *va, void *vb) {
    auto a = (RefObj *)va;
    auto b = (RefObj *)vb;
    return a->refc != b->refc
           ? a->refc > b->refc
           : (a->tti != b->tti
              ? a->tti > b->tti
              : false);
}

void VM::DumpVal(RefObj *ro, const char *prefix) {
    ostringstream ss;
    ss << prefix << ": ";
    RefToString(*this, ss, ro, debugpp);
    ss << " (" << ro->refc << "): " << (size_t)ro;
    LOG_DEBUG(ss.str());
}

void VM::DumpFileLine(const int *fip, ostringstream &ss) {
    // error is usually in the byte before the current ip.
    auto li = LookupLine(fip - 1, codestart, bcf);
    ss << bcf->filenames()->Get(li->fileidx())->string_view() << '(' << li->line() << ')';
}

void VM::DumpLeaks() {
    vector<void *> leaks = pool.findleaks();
    auto filename = "leaks.txt";
    if (leaks.empty()) {
        if (FileExists(filename)) FileDelete(filename);
    } else {
        LOG_ERROR("LEAKS FOUND (this indicates cycles in your object graph, or a bug in"
                  " Lobster)");
        ostringstream ss;
        #ifndef VM_COMPILED_CODE_MODE
            ss << "in: ";
            DumpFileLine(ip, ss);
            ss << "\n";
        #endif
        sort(leaks.begin(), leaks.end(), _LeakSorter);
        PrintPrefs leakpp = debugpp;
        leakpp.cycles = 0;
        for (auto p : leaks) {
            auto ro = (RefObj *)p;
            switch(ro->ti(*this).t) {
                case V_VALUEBUF:
                case V_STACKFRAMEBUF:
                    break;
                case V_STRING:
                case V_COROUTINE:
                case V_RESOURCE:
                case V_VECTOR:
                case V_CLASS: {
                    ro->CycleStr(ss);
                    ss << " = ";
                    RefToString(*this, ss, ro, leakpp);
                    #if DELETE_DELAY
                        ss << " ";
                        DumpFileLine(ro->alloc_ip, ss);
                        ss << " " << (size_t)ro;
                    #endif
                    ss << "\n";
                    break;
                }
                default: assert(false);
            }
        }
        #ifndef NDEBUG
            LOG_ERROR(ss.str());
        #else
            if (leaks.size() < 50) {
                LOG_ERROR(ss.str());
            } else {
                LOG_ERROR(leaks.size(), " leaks, details in ", filename);
                WriteFile(filename, false, ss.str());
            }
        #endif
    }
    pool.printstats(false);
}

void VM::OnAlloc(RefObj *ro) {
    #if DELETE_DELAY
        LOG_DEBUG("alloc: ", (size_t)ro);
        ro->alloc_ip = ip;
    #else
        (void)ro;
    #endif
}

#undef new

LVector *VM::NewVec(intp initial, intp max, type_elem_t tti) {
    assert(GetTypeInfo(tti).t == V_VECTOR);
    auto v = new (pool.alloc_small(sizeof(LVector))) LVector(*this, initial, max, tti);
    OnAlloc(v);
    return v;
}

LObject *VM::NewObject(intp max, type_elem_t tti) {
    assert(IsUDT(GetTypeInfo(tti).t));
    auto s = new (pool.alloc(sizeof(LObject) + sizeof(Value) * max)) LObject(tti);
    OnAlloc(s);
    return s;
}
LString *VM::NewString(size_t l) {
    auto s = new (pool.alloc(sizeof(LString) + l + 1)) LString((int)l);
    OnAlloc(s);
    return s;
}
LCoRoutine *VM::NewCoRoutine(InsPtr rip, const int *vip, LCoRoutine *p, type_elem_t cti) {
    assert(GetTypeInfo(cti).t == V_COROUTINE);
    auto c = new (pool.alloc(sizeof(LCoRoutine)))
        LCoRoutine(sp + 2 /* top of sp + pushed coro */, (int)stackframes.size(), rip, vip, p, cti);
    OnAlloc(c);
    return c;
}
LResource *VM::NewResource(void *v, const ResourceType *t) {
    auto r = new (pool.alloc(sizeof(LResource))) LResource(v, t);
    OnAlloc(r);
    return r;
}
#ifdef _WIN32
#ifndef NDEBUG
#define new DEBUG_NEW
#endif
#endif

LString *VM::NewString(string_view s) {
    auto r = NewString(s.size());
    auto dest = (char *)r->data();
    memcpy(dest, s.data(), s.size());
    #if DELETE_DELAY
        LOG_DEBUG("string: \"", s, "\" - ", (size_t)r);
    #endif
    return r;
}

LString *VM::NewString(string_view s1, string_view s2) {
    auto s = NewString(s1.size() + s2.size());
    auto dest = (char *)s->data();
    memcpy(dest, s1.data(), s1.size());
    memcpy(dest + s1.size(), s2.data(), s2.size());
    return s;
}

LString *VM::ResizeString(LString *s, intp size, int c, bool back) {
    auto ns = NewString(size);
    auto sdest = (char *)ns->data();
    auto cdest = sdest;
    auto remain = size - s->len;
    if (back) sdest += remain;
    else cdest += s->len;
    memcpy(sdest, s->data(), s->len);
    memset(cdest, c, remain);
    s->Dec(*this);
    return ns;
}

// This function is now way less important than it was when the language was still dynamically
// typed. But ok to leave it as-is for "index out of range" and other errors that are still dynamic.
Value VM::Error(string err, const RefObj *a, const RefObj *b) {
    if (trace == TraceMode::TAIL && trace_output.size()) {
        string s;
        for (size_t i = trace_ring_idx; i < trace_output.size(); i++) s += trace_output[i].str();
        for (size_t i = 0; i < trace_ring_idx; i++) s += trace_output[i].str();
        s += err;
        THROW_OR_ABORT(s);
    }
    ostringstream ss;
    #ifndef VM_COMPILED_CODE_MODE
        DumpFileLine(ip, ss);
        ss << ": ";
    #endif
    ss << "VM error: " << err;
    if (a) { ss << "\n arg: "; RefToString(*this, ss, a, debugpp); }
    if (b) { ss << "\n arg: "; RefToString(*this, ss, b, debugpp); }
    while (sp >= 0 && (!stackframes.size() || sp != stackframes.back().spstart)) {
        // Sadly can't print this properly.
        ss << "\n stack: ";
        to_string_hex(ss, (size_t)VM_TOP().any());
        if (pool.pointer_is_in_allocator(VM_TOP().any())) {
            ss << ", maybe: ";
            RefToString(*this, ss, VM_TOP().ref(), debugpp);
        }
        VM_POP();  // We don't DEC here, as we can't know what type it is.
                   // This is ok, as we ignore leaks in case of an error anyway.
    }
    for (;;) {
        if (!stackframes.size()) break;
        int deffun = *(stackframes.back().funstart);
        if (deffun >= 0) {
            ss << "\nin function: " << bcf->functions()->Get(deffun)->name()->string_view();
        } else {
            ss << "\nin block";
        }
        #ifndef VM_COMPILED_CODE_MODE
            ss << " -> ";
            DumpFileLine(ip, ss);
        #endif
        VarCleanup<1>(ss.tellp() < 10000 ? &ss : nullptr, -2 /* clean up temps always */);
    }
    ss << "\nglobals:";
    for (size_t i = 0; i < bcf->specidents()->size(); ) {
        i += DumpVar(ss, vars[i], i, true);
    }
    THROW_OR_ABORT(ss.str());
}

void VM::VMAssert(const char *what) {
    Error(string("VM internal assertion failure: ") + what);
}
void VM::VMAssert(const char *what, const RefObj *a, const RefObj *b) {
    Error(string("VM internal assertion failure: ") + what, a, b);
}

#if !defined(NDEBUG) && RTT_ENABLED
#define STRINGIFY(x) #x
#define TOSTRING(x) STRINGIFY(x)
#define VMASSERT(test) { if (!(test)) VMAssert(__FILE__ ": " TOSTRING(__LINE__) ": " #test); }
#else
#define VMASSERT(test) {}
#endif
#if RTT_ENABLED
#define VMTYPEEQ(val, vt) VMASSERT((val).type == (vt))
#else
#define VMTYPEEQ(val, vt) { (void)(val); (void)(vt); }
#endif

int VM::DumpVar(ostringstream &ss, const Value &x, size_t idx, bool dumpglobals) {
    auto sid = bcf->specidents()->Get((uint)idx);
    auto id = bcf->idents()->Get(sid->ididx());
    if (id->readonly() || id->global() != dumpglobals) return 1;
    auto name = id->name()->string_view();
    auto &ti = GetVarTypeInfo((int)idx);
    #if RTT_ENABLED
        if (ti.t != x.type) return 1;  // Likely uninitialized.
    #endif
    ss << "\n " << name << " = ";
    if (IsStruct(ti.t)) {
        StructToString(ss, debugpp, ti, &x);
        return ti.len;
    } else {
        x.ToString(*this, ss, ti, debugpp);
        return 1;
    }
}

void VM::FinalStackVarsCleanup() {
    VMASSERT(sp < 0 && !stackframes.size());
    #ifndef NDEBUG
        LOG_INFO("stack at its highest was: ", maxsp);
    #endif
}

void VM::JumpTo(InsPtr j) {
    #ifdef VM_COMPILED_CODE_MODE
        next_call_target = j.f;
    #else
        ip = j.f + codestart;
    #endif
}

InsPtr VM::GetIP() {
    #ifdef VM_COMPILED_CODE_MODE
        return InsPtr(next_call_target);
    #else
        return InsPtr(ip - codestart);
    #endif
}

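// Unwinds a single stack frame. Walks the FUNSTART operand list saved in stf.funstart
// (function id, nargs + arg indices, ndef + def indices, nkeepvars, nownedvars + owned
// indices), releases kept/owned refs, and restores the callers' values of args and defs
// from the stack back into vars. When called from Error() (is_error) it first dumps the
// current values of those vars into the error message.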
template<int is_error> int VM::VarCleanup(ostringstream *error, int towhere) {
    (void)error;
    auto &stf = stackframes.back();
    if constexpr (!is_error) VMASSERT(sp == stf.spstart);
    auto fip = stf.funstart;
    fip++;  // function id.
    auto nargs = *fip++;
    auto freevars = fip + nargs;
    fip += nargs;
    auto ndef = *fip++;
    fip += ndef;
    auto defvars = fip;
    auto nkeepvars = *fip++;
    if constexpr (is_error) {
        // Do this first, since values may get deleted below.
        for (int j = 0; j < ndef; ) {
            auto i = *(defvars - j - 1);
            j += DumpVar(*error, vars[i], i, false);
        }
        for (int j = 0; j < nargs; ) {
            auto i = *(freevars - j - 1);
            j += DumpVar(*error, vars[i], i, false);
        }
    }
    for (int i = 0; i < nkeepvars; i++) VM_POP().LTDECRTNIL(*this);
    auto ownedvars = *fip++;
    for (int i = 0; i < ownedvars; i++) vars[*fip++].LTDECRTNIL(*this);
    while (ndef--) {
        auto i = *--defvars;
        vars[i] = VM_POP();
    }
    while (nargs--) {
        auto i = *--freevars;
        vars[i] = VM_POP();
    }
    JumpTo(stf.retip);
    bool lastunwind = towhere == *stf.funstart;
    stackframes.pop_back();
    if (!lastunwind) {
        // This kills any temps on the stack. If these are refs these should not be
        // owners, since a var or keepvar owns them instead.
        sp = stackframes.size() ? stackframes.back().spstart : -1;
    }
    return lastunwind;
}

// Initializes only 3 fields of the stack frame, FunIntro must be called right after.
void VM::StartStackFrame(InsPtr retip) {
    stackframes.push_back(StackFrame());
    auto &stf = stackframes.back();
    stf.retip = retip;
}

void VM::FunIntroPre(InsPtr fun) {
    JumpTo(fun);
    #ifdef VM_COMPILED_CODE_MODE
        // We don't call FunIntro() here, instead the compiled code for FUNSTART/FUNMULTI actually
        // does that.
    #else
        VMASSERT(*ip == IL_FUNSTART);
        ip++;
        FunIntro();
    #endif
}

// Only valid to be called right after StartStackFrame, with no bytecode in-between.
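// Reads the same FUNSTART operand layout that VarCleanup() above unwinds: swaps the
// incoming arguments into vars, saves the previous values of the locals on the stack,
// and reserves the keepvar slots.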
void VM::FunIntro(VM_OP_ARGS) {
    #ifdef VM_PROFILER
        vm_count_fcalls++;
    #endif
    auto funstart = ip;
    ip++;  // definedfunction
    if (sp > stacksize - STACKMARGIN) {
        // per function call increment should be small
        // FIXME: not safe for untrusted scripts, could simply add lots of locals
        // could record max number of locals? not allow more than N locals?
        if (stacksize >= maxstacksize) Error("stack overflow! (use set_max_stack_size() if needed)");
        auto nstack = new Value[stacksize *= 2];
        t_memcpy(nstack, stack, sp + 1);
        delete[] stack;
        stack = nstack;

        LOG_DEBUG("stack grew to: ", stacksize);
    }
    auto nargs_fun = *ip++;
    for (int i = 0; i < nargs_fun; i++) swap(vars[ip[i]], stack[sp - nargs_fun + i + 1]);
    ip += nargs_fun;
    auto ndef = *ip++;
    for (int i = 0; i < ndef; i++) {
        // For most locals this just saves a nil; only in recursive cases does it have an actual value.
        auto varidx = *ip++;
        VM_PUSH(vars[varidx]);
        vars[varidx] = Value();
    }
    auto nkeepvars = *ip++;
    for (int i = 0; i < nkeepvars; i++) VM_PUSH(Value());
    auto nownedvars = *ip++;
    ip += nownedvars;
    auto &stf = stackframes.back();
    stf.funstart = funstart;
    stf.spstart = sp;
    #ifndef NDEBUG
        if (sp > maxsp) maxsp = sp;
    #endif
}

void VM::FunOut(int towhere, int nrv) {
    sp -= nrv;
    // Have to store these off the stack, since VarCleanup() may cause stack activity if coroutines
    // are destructed.
    ts_memcpy(retvalstemp, VM_TOPPTR(), nrv);
    for (;;) {
        if (!stackframes.size()) {
            Error("\"return from " + bcf->functions()->Get(towhere)->name()->string_view() +
                  "\" outside of function");
        }
        if (VarCleanup<0>(nullptr, towhere)) break;
    }
    ts_memcpy(VM_TOPPTR(), retvalstemp, nrv);
    sp += nrv;
}

void VM::CoVarCleanup(LCoRoutine *co) {
    // Convenient way to copy everything back onto the stack.
    InsPtr tip(0);
    auto copylen = co->Resume(sp + 1, stack, stackframes, tip, nullptr);
    auto startsp = sp;
    sp += copylen;
    for (int i = co->stackframecopylen - 1; i >= 0; i--) {
        auto &stf = stackframes.back();
        sp = stf.spstart;  // Kill any temps on top of the stack.
        // Save the ip, because VarCleanup will jump to it.
        auto bip = GetIP();
        VarCleanup<0>(nullptr, !i ? *stf.funstart : -2);
        JumpTo(bip);
    }
    assert(sp == startsp);
    (void)startsp;
}

void VM::CoNonRec(const int *varip) {
    // probably could be skipped in a "release" mode
    for (auto co = curcoroutine; co; co = co->parent) if (co->varip == varip) {
        // if allowed, inner coro would save vars of outer, and then possibly restore them outside
        // of scope of parent
        Error("cannot create coroutine recursively");
    }
    // this check guarantees all saved stack vars are undef.
}

void VM::CoNew(VM_OP_ARGS VM_COMMA VM_OP_ARGS_CALL) {
    #ifdef VM_COMPILED_CODE_MODE
        ip++;
        InsPtr returnip(fcont);
    #else
        InsPtr returnip(*ip++);
    #endif
    auto ctidx = (type_elem_t)*ip++;
    CoNonRec(ip);
    curcoroutine = NewCoRoutine(returnip, ip, curcoroutine, ctidx);
    curcoroutine->BackupParentVars(*this, vars);
    int nvars = *ip++;
    ip += nvars;
    // Always have the active coroutine at top of the stack, retaining 1 refcount. This is
    // because it is not guaranteed that there are any other references, and we can't have it drop
    // to 0 while active.
    VM_PUSH(Value(curcoroutine));
}

void VM::CoSuspend(InsPtr retip) {
    int newtop = curcoroutine->Suspend(*this, sp + 1, stack, stackframes, retip, curcoroutine);
    JumpTo(retip);
    sp = newtop - 1;  // top of stack is now coro value from create or resume
}

void VM::CoClean() {
    // This function is like yield, except happens implicitly when the coroutine returns.
    // It will jump back to the resume (or create) that invoked it.
    for (int i = 1; i <= *curcoroutine->varip; i++) {
        auto &var = vars[curcoroutine->varip[i]];
        var = curcoroutine->stackcopy[i - 1];
    }
    auto co = curcoroutine;
    CoSuspend(InsPtr(0));
    VMASSERT(co->stackcopylen == 1);
    co->active = false;
}

void VM::CoYield(VM_OP_ARGS_CALL) {
    assert(curcoroutine);  // Should not be possible since yield calls are statically checked.
    #ifdef VM_COMPILED_CODE_MODE
        InsPtr retip(fcont);
    #else
        InsPtr retip(ip - codestart);
    #endif
    auto ret = VM_POP();
    for (int i = 1; i <= *curcoroutine->varip; i++) {
        auto &var = vars[curcoroutine->varip[i]];
        VM_PUSH(var);
        //var.type = V_NIL;
        var = curcoroutine->stackcopy[i - 1];
    }
    VM_PUSH(ret);  // current value always top of the stack, saved as part of suspended coroutine.
    CoSuspend(retip);
    // Actual top of stack here is coroutine itself, that we placed here with CoResume.
}

void VM::CoResume(LCoRoutine *co) {
    if (co->stackstart >= 0)
        Error("cannot resume running coroutine");
    if (!co->active)
        Error("cannot resume finished coroutine");
    // This will be the return value for the corresponding yield, and holds the ref for gc.
    VM_PUSH(Value(co));
    CoNonRec(co->varip);
    auto rip = GetIP();
    sp += co->Resume(sp + 1, stack, stackframes, rip, curcoroutine);
    JumpTo(rip);
    curcoroutine = co;
    // must be, since those vars got backed up in it before
    VMASSERT(curcoroutine->stackcopymax >= *curcoroutine->varip);
    curcoroutine->stackcopylen = *curcoroutine->varip;
    //curcoroutine->BackupParentVars(vars);
    VM_POP().LTDECTYPE(*this, GetTypeInfo(curcoroutine->ti(*this).yieldtype).t);  // previous current value
    for (int i = *curcoroutine->varip; i > 0; i--) {
        auto &var = vars[curcoroutine->varip[i]];
        // No INC, since parent is still on the stack and holds a ref for us.
        curcoroutine->stackcopy[i - 1] = var;
        var = VM_POP();
    }
    // the builtin call takes care of the return value
}

void VM::EndEval(const Value &ret, const TypeInfo &ti) {
    TerminateWorkers();
    ostringstream ss;
    ret.ToString(*this, ss, ti, programprintprefs);
    evalret = ss.str();
    ret.LTDECTYPE(*this, ti.t);
    assert(sp == -1);
    FinalStackVarsCleanup();
    vml.LogCleanup();
    for (auto s : constant_strings) {
        if (s) s->Dec(*this);
    }
    while (!delete_delay.empty()) {
        auto ro = delete_delay.back();
        delete_delay.pop_back();
        ro->DECDELETENOW(*this);
    }
    DumpLeaks();
    VMASSERT(!curcoroutine);
    #ifdef VM_PROFILER
        LOG_INFO("Profiler statistics:");
        uint64_t total = 0;
        auto fraction = 200;  // Line needs at least 0.5% to be counted.
        vector<uint64_t> lineprofilecounts(bcf->lineinfo()->size());
        for (size_t i = 0; i < codelen; i++) {
            auto li = LookupLine(codestart + i, codestart, bcf);  // FIXME: can do faster
            size_t j = li - bcf->lineinfo()->Get(0);
            lineprofilecounts[j] += byteprofilecounts[i];
            total += byteprofilecounts[i];
        }
        struct LineRange { int line, lastline, fileidx; uint64_t count; };
        vector<LineRange> uniques;
        for (uint i = 0; i < bcf->lineinfo()->size(); i++) {
            uint64_t c = lineprofilecounts[i];
            if (c > total / fraction) {
                auto li = bcf->lineinfo()->Get(i);
                uniques.push_back(LineRange{ li->line(), li->line(), li->fileidx(), c });
            }
        }
        std::sort(uniques.begin(), uniques.end(), [&] (const LineRange &a, const LineRange &b) {
            return a.fileidx != b.fileidx ? a.fileidx < b.fileidx : a.line < b.line;
        });
        for (auto it = uniques.begin(); it != uniques.end();) {
            if (it != uniques.begin()) {
                auto pit = it - 1;
                if (it->fileidx == pit->fileidx &&
                    ((it->line == pit->lastline) ||
                     (it->line == pit->lastline + 1 && pit->lastline++))) {
                    pit->count += it->count;
                    it = uniques.erase(it);
                    continue;
                }
            }
            ++it;
        }
        for (auto &u : uniques) {
            LOG_INFO(bcf->filenames()->Get(u.fileidx)->string_view(), "(", u.line,
                     u.lastline != u.line ? "-" + to_string(u.lastline) : "",
                     "): ", u.count * 100.0f / total, " %");
        }
        if (vm_count_fcalls)  // remove trivial VM executions from output
            LOG_INFO("ins ", vm_count_ins, ", fcall ", vm_count_fcalls, ", bcall ",
                     vm_count_bcalls, ", decref ", vm_count_decref);
    #endif
    #if MEASURE_INSTRUCTION_COMBINATIONS
        struct trip { size_t freq; int opc1, opc2; };
        vector<trip> combinations;
        for (auto &p : instruction_combinations)
            combinations.push_back({ p.second, p.first.first, p.first.second });
        sort(combinations.begin(), combinations.end(), [](const trip &a, const trip &b) {
            return a.freq > b.freq;
        });
        combinations.resize(50);
        for (auto &c : combinations) {
            LOG_PROGRAM("instruction ", ILNames()[c.opc1], " -> ", ILNames()[c.opc2], " (",
                        c.freq, "x)");
        }
        instruction_combinations.clear();
    #endif
    #ifndef VM_ERROR_RET_EXPERIMENT
        THROW_OR_ABORT(string("end-eval"));
    #endif
}

void VM::EvalProgram() {
    // Keep the exception handling code in a separate function from the hot loop in
    // EvalProgramInner(), just in case it affects the compiler.
    #ifdef USE_EXCEPTION_HANDLING
    try
    #endif
    {
        EvalProgramInner();
    }
    #ifdef USE_EXCEPTION_HANDLING
    catch (string &s) {
        if (s != "end-eval") THROW_OR_ABORT(s);
    }
    #endif
}

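// Returns the stream the next trace line should be written to. In TAIL mode the last 50
// entries are kept in a ring buffer (trace_output / trace_ring_idx) so Error() can
// prepend them to the error message; otherwise a single entry is reused.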
ostringstream &VM::TraceStream() {
    size_t trace_size = trace == TraceMode::TAIL ? 50 : 1;
    if (trace_output.size() < trace_size) trace_output.resize(trace_size);
    if (trace_ring_idx == trace_size) trace_ring_idx = 0;
    auto &ss = trace_output[trace_ring_idx++];
    ss.str(string());
    return ss;
}

void VM::EvalProgramInner() {
    for (;;) {
        #ifdef VM_COMPILED_CODE_MODE
            #if VM_DISPATCH_METHOD == VM_DISPATCH_TRAMPOLINE
                compiled_code_ip = ((block_t)compiled_code_ip)(*this);
            #elif VM_DISPATCH_METHOD == VM_DISPATCH_SWITCH_GOTO
                ((block_base_t)compiled_code_ip)(*this);
                assert(false);  // Should not return here.
            #endif
        #else
            #ifndef NDEBUG
                if (trace != TraceMode::OFF) {
                    auto &ss = TraceStream();
                    DisAsmIns(nfr, ss, ip, codestart, typetable, bcf);
                    ss << " [" << (sp + 1) << "] -";
                    #if RTT_ENABLED
                        #if DELETE_DELAY
                            ss << ' ' << (size_t)VM_TOP().any();
                        #endif
                        for (int i = 0; i < 3 && sp - i >= 0; i++) {
                            auto x = VM_TOPM(i);
                            ss << ' ';
                            x.ToStringBase(*this, ss, x.type, debugpp);
                        }
                    #endif
                    if (trace == TraceMode::TAIL) ss << '\n'; else LOG_PROGRAM(ss.str());
                }
                //currentline = LookupLine(ip).line;
            #endif
            #ifdef VM_PROFILER
                auto code_idx = size_t(ip - codestart);
                assert(code_idx < codelen);
                byteprofilecounts[code_idx]++;
                vm_count_ins++;
            #endif
            auto op = *ip++;
            #if MEASURE_INSTRUCTION_COMBINATIONS
                instruction_combinations[{ last_instruction_opc, op }]++;
                last_instruction_opc = op;
            #endif
            #ifndef NDEBUG
                if (op < 0 || op >= IL_MAX_OPS)
                    Error(cat("bytecode format problem: ", op));
            #endif
            #ifdef VM_ERROR_RET_EXPERIMENT
                bool terminate =
            #endif
            ((*this).*(f_ins_pointers[op]))();
            #ifdef VM_ERROR_RET_EXPERIMENT
                if (terminate) return;
            #endif
        #endif
    }
}

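// Opcode handlers. VM_INS_RET / VM_JMP_RET / VM_RET abstract over the build mode: with
// VM_ERROR_RET_EXPERIMENT they presumably expand to returning a terminate flag that
// EvalProgramInner() checks, otherwise to plain void returns.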
VM_INS_RET VM::U_PUSHINT(int x) { VM_PUSH(Value(x)); VM_RET; }
VM_INS_RET VM::U_PUSHFLT(int x) { int2float i2f; i2f.i = x; VM_PUSH(Value(i2f.f)); VM_RET; }
VM_INS_RET VM::U_PUSHNIL() { VM_PUSH(Value()); VM_RET; }

VM_INS_RET VM::U_PUSHINT64(int a, int b) {
    #if !VALUE_MODEL_64
        Error("Code containing 64-bit constants cannot run on a 32-bit build.");
    #endif
    auto v = Int64FromInts(a, b);
    VM_PUSH(Value(v));
    VM_RET;
}

VM_INS_RET VM::U_PUSHFLT64(int a, int b) {
    int2float64 i2f;
    i2f.i = Int64FromInts(a, b);
    VM_PUSH(Value(i2f.f));
    VM_RET;
}

VM_INS_RET VM::U_PUSHFUN(int start VM_COMMA VM_OP_ARGS_CALL) {
    #ifdef VM_COMPILED_CODE_MODE
        (void)start;
    #else
        auto fcont = start;
    #endif
    VM_PUSH(Value(InsPtr(fcont)));
    VM_RET;
}

VM_INS_RET VM::U_PUSHSTR(int i) {
    // FIXME: have a way that constant strings can stay in the bytecode,
    // or at least preallocate them all
    auto &s = constant_strings[i];
    if (!s) {
        auto fb_s = bcf->stringtable()->Get(i);
        s = NewString(fb_s->string_view());
    }
    #if STRING_CONSTANTS_KEEP
        s->Inc();
    #endif
    VM_PUSH(Value(s));
    VM_RET;
}

VM_INS_RET VM::U_INCREF(int off) {
    VM_TOPM(off).LTINCRTNIL();
    VM_RET;
}

VM_INS_RET VM::U_KEEPREF(int off, int ki) {
    VM_TOPM(ki).LTDECRTNIL(*this);  // FIXME: this is only here for inlined for bodies!
    VM_TOPM(ki) = VM_TOPM(off);
    VM_RET;
}

VM_INS_RET VM::U_CALL(int f VM_COMMA VM_OP_ARGS_CALL) {
    #ifdef VM_COMPILED_CODE_MODE
        (void)f;
        block_t fun = 0;  // Dynamic calls need this set, but for CALL it is ignored.
    #else
        auto fun = f;
        auto fcont = ip - codestart;
    #endif
    StartStackFrame(InsPtr(fcont));
    FunIntroPre(InsPtr(fun));
    VM_RET;
}

VM_INS_RET VM::U_CALLVCOND(VM_OP_ARGS_CALL) {
    // FIXME: don't need to check for function value again below if false
    if (!VM_TOP().True()) {
        VM_POP();
        #ifdef VM_COMPILED_CODE_MODE
            next_call_target = 0;
        #endif
    } else {
        U_CALLV(VM_FC_PASS_THRU);
    }
    VM_RET;
}

VM_INS_RET VM::U_CALLV(VM_OP_ARGS_CALL) {
    Value fun = VM_POP();
    VMTYPEEQ(fun, V_FUNCTION);
    #ifndef VM_COMPILED_CODE_MODE
        auto fcont = ip - codestart;
    #endif
    StartStackFrame(InsPtr(fcont));
    FunIntroPre(fun.ip());
    VM_RET;
}

VM_INS_RET VM::U_DDCALL(int vtable_idx, int stack_idx VM_COMMA VM_OP_ARGS_CALL) {
    auto self = VM_TOPM(stack_idx);
    VMTYPEEQ(self, V_CLASS);
    auto start = self.oval()->ti(*this).vtable_start;
    auto fun = vtables[start + vtable_idx];
    #ifdef VM_COMPILED_CODE_MODE
    #else
        auto fcont = ip - codestart;
        assert(fun.f >= 0);
    #endif
    StartStackFrame(InsPtr(fcont));
    FunIntroPre(fun);
    VM_RET;
}

VM_INS_RET VM::U_FUNSTART(VM_OP_ARGS) {
    #ifdef VM_COMPILED_CODE_MODE
        FunIntro(ip);
    #else
        VMASSERT(false);
    #endif
    VM_RET;
}

VM_INS_RET VM::U_RETURN(int df, int nrv) {
    FunOut(df, nrv);
    VM_RET;
}

VM_INS_RET VM::U_ENDSTATEMENT(int line, int fileidx) {
    #ifdef NDEBUG
        (void)line;
        (void)fileidx;
    #else
        if (trace != TraceMode::OFF) {
            auto &ss = TraceStream();
            ss << bcf->filenames()->Get(fileidx)->string_view() << '(' << line << ')';
            if (trace == TraceMode::TAIL) ss << '\n'; else LOG_PROGRAM(ss.str());
        }
    #endif
    assert(sp == stackframes.back().spstart);
    VM_RET;
}

VM_INS_RET VM::U_EXIT(int tidx) {
    if (tidx >= 0) EndEval(VM_POP(), GetTypeInfo((type_elem_t)tidx));
    else EndEval(Value(), GetTypeInfo(TYPE_ELEM_ANY));
    VM_TERMINATE;
}

VM_INS_RET VM::U_CONT1(int nfi) {
    auto nf = nfr.nfuns[nfi];
    nf->cont1(*this);
    VM_RET;
}

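// For-loop protocol: the iterable sits on top of the stack with the running index just
// below it (see FORELEM below). ForLoop() increments the index, then either continues
// into the body or pops both when the loop is done; in compiled-code mode it returns
// whether the backward jump should be taken instead of patching ip directly.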
VM_JMP_RET VM::ForLoop(intp len) {
    #ifndef VM_COMPILED_CODE_MODE
        auto cont = *ip++;
    #endif
    auto &i = VM_TOPM(1);
    assert(i.type == V_INT);
    i.setival(i.ival() + 1);
    if (i.ival() < len) {
        #ifdef VM_COMPILED_CODE_MODE
            return true;
        #else
            ip = cont + codestart;
        #endif
    } else {
        (void)VM_POP();  /* iter */
        (void)VM_POP();  /* i */
        #ifdef VM_COMPILED_CODE_MODE
            return false;
        #endif
    }
}

#define FORELEM(L) \
    auto &iter = VM_TOP(); \
    auto i = VM_TOPM(1).ival(); \
    assert(i < L); \

VM_JMP_RET VM::U_IFOR() { return ForLoop(VM_TOP().ival()); VM_RET; }
VM_JMP_RET VM::U_VFOR() { return ForLoop(VM_TOP().vval()->len); VM_RET; }
VM_JMP_RET VM::U_SFOR() { return ForLoop(VM_TOP().sval()->len); VM_RET; }

VM_INS_RET VM::U_IFORELEM() { FORELEM(iter.ival()); (void)iter; VM_PUSH(i); VM_RET; }
VM_INS_RET VM::U_VFORELEM() { FORELEM(iter.vval()->len); iter.vval()->AtVW(*this, i); VM_RET; }
VM_INS_RET VM::U_VFORELEMREF() { FORELEM(iter.vval()->len); auto el = iter.vval()->At(i); el.LTINCRTNIL(); VM_PUSH(el); VM_RET; }
VM_INS_RET VM::U_SFORELEM() { FORELEM(iter.sval()->len); VM_PUSH(Value((int)((uchar *)iter.sval()->data())[i])); VM_RET; }

VM_INS_RET VM::U_FORLOOPI() {
    auto &i = VM_TOPM(1);  // This relies on for being inlined, otherwise it would be 2.
    assert(i.type == V_INT);
    VM_PUSH(i);
    VM_RET;
}

VM_INS_RET VM::U_BCALLRETV(int nfi) {
    BCallProf();
    auto nf = nfr.nfuns[nfi];
    nf->fun.fV(*this);
    VM_RET;
}
VM_INS_RET VM::U_BCALLREFV(int nfi) {
    BCallProf();
    auto nf = nfr.nfuns[nfi];
    nf->fun.fV(*this);
    // This can only pop a single value, not called for structs.
    VM_POP().LTDECRTNIL(*this);
    VM_RET;
}
VM_INS_RET VM::U_BCALLUNBV(int nfi) {
    BCallProf();
    auto nf = nfr.nfuns[nfi];
    nf->fun.fV(*this);
    // This can only pop a single value, not called for structs.
    VM_POP();
    VM_RET;
}

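// BCALLOPH/BCALLOP generate the builtin-call opcodes U_BCALL{RET,REF,UNB}{0..7}: pop N
// arguments, call the native function, then either push the result (RET), decref a
// returned ref (REF), or discard it (UNB).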
#define BCALLOPH(PRE,N,DECLS,ARGS,RETOP) VM_INS_RET VM::U_BCALL##PRE##N(int nfi) { \
    BCallProf(); \
    auto nf = nfr.nfuns[nfi]; \
    DECLS; \
    Value v = nf->fun.f##N ARGS; \
    RETOP; \
    VM_RET; \
}

#define BCALLOP(N,DECLS,ARGS) \
    BCALLOPH(RET,N,DECLS,ARGS,VM_PUSH(v);BCallRetCheck(nf)) \
    BCALLOPH(REF,N,DECLS,ARGS,v.LTDECRTNIL(*this)) \
    BCALLOPH(UNB,N,DECLS,ARGS,(void)v)

BCALLOP(0, {}, (*this));
BCALLOP(1, auto a0 = VM_POP(), (*this, a0));
BCALLOP(2, auto a1 = VM_POP();auto a0 = VM_POP(), (*this, a0, a1));
BCALLOP(3, auto a2 = VM_POP();auto a1 = VM_POP();auto a0 = VM_POP(), (*this, a0, a1, a2));
BCALLOP(4, auto a3 = VM_POP();auto a2 = VM_POP();auto a1 = VM_POP();auto a0 = VM_POP(), (*this, a0, a1, a2, a3));
BCALLOP(5, auto a4 = VM_POP();auto a3 = VM_POP();auto a2 = VM_POP();auto a1 = VM_POP();auto a0 = VM_POP(), (*this, a0, a1, a2, a3, a4));
BCALLOP(6, auto a5 = VM_POP();auto a4 = VM_POP();auto a3 = VM_POP();auto a2 = VM_POP();auto a1 = VM_POP();auto a0 = VM_POP(), (*this, a0, a1, a2, a3, a4, a5));
BCALLOP(7, auto a6 = VM_POP();auto a5 = VM_POP();auto a4 = VM_POP();auto a3 = VM_POP();auto a2 = VM_POP();auto a1 = VM_POP();auto a0 = VM_POP(), (*this, a0, a1, a2, a3, a4, a5, a6));

VM_INS_RET VM::U_ASSERTR(int line, int fileidx, int stringidx) {
    (void)line;
    (void)fileidx;
    if (!VM_TOP().True()) {
        Error(cat(
            #ifdef VM_COMPILED_CODE_MODE
                bcf->filenames()->Get(fileidx)->string_view(), "(", line, "): ",
            #endif
            "assertion failed: ", bcf->stringtable()->Get(stringidx)->string_view()));
    }
    VM_RET;
}

VM_INS_RET VM::U_ASSERT(int line, int fileidx, int stringidx) {
    U_ASSERTR(line, fileidx, stringidx);
    VM_POP();
    VM_RET;
}

VM_INS_RET VM::U_NEWVEC(int ty, int len) {
    auto type = (type_elem_t)ty;
    auto vec = NewVec(len, len, type);
    if (len) vec->Init(*this, VM_TOPPTR() - len * vec->width, false);
    VM_POPN(len * (int)vec->width);
    VM_PUSH(Value(vec));
    VM_RET;
}

VM_INS_RET VM::U_NEWOBJECT(int ty) {
    auto type = (type_elem_t)ty;
    auto len = GetTypeInfo(type).len;
    auto vec = NewObject(len, type);
    if (len) vec->Init(*this, VM_TOPPTR() - len, len, false);
    VM_POPN(len);
    VM_PUSH(Value(vec));
    VM_RET;
}

VM_INS_RET VM::U_POP() { VM_POP(); VM_RET; }
VM_INS_RET VM::U_POPREF() { auto x = VM_POP(); x.LTDECRTNIL(*this); VM_RET; }

VM_INS_RET VM::U_POPV(int len) { VM_POPN(len); VM_RET; }
VM_INS_RET VM::U_POPVREF(int len) { while (len--) VM_POP().LTDECRTNIL(*this); VM_RET; }

VM_INS_RET VM::U_DUP() { auto x = VM_TOP(); VM_PUSH(x); VM_RET; }

#define GETARGS() Value b = VM_POP(); Value a = VM_POP()
#define TYPEOP(op, extras, field, errstat) Value res; errstat; \
    if (extras & 1 && b.field == 0) Div0(); res = a.field op b.field;

#define _IOP(op, extras) \
    TYPEOP(op, extras, ival(), assert(a.type == V_INT && b.type == V_INT))
#define _FOP(op, extras) \
    TYPEOP(op, extras, fval(), assert(a.type == V_FLOAT && b.type == V_FLOAT))

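// _VOP applies a binary operator elementwise to a struct of 'len' scalars on the stack,
// either against a single popped scalar (withscalar) or against a second struct of the
// same length, writing the results back in place.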
#define _GETA() VM_TOPPTR() - len
#define _VOP(op, extras, V_T, field, withscalar, geta) { \
    if (withscalar) { \
        auto b = VM_POP(); \
        VMTYPEEQ(b, V_T) \
        auto veca = geta; \
        for (int j = 0; j < len; j++) { \
            auto &a = veca[j]; \
            VMTYPEEQ(a, V_T) \
            auto bv = b.field(); \
            if (extras & 1 && bv == 0) Div0(); \
            a = Value(a.field() op bv); \
        } \
    } else { \
        VM_POPN(len); \
        auto vecb = VM_TOPPTR(); \
        auto veca = geta; \
        for (int j = 0; j < len; j++) { \
            auto b = vecb[j]; \
            VMTYPEEQ(b, V_T) \
            auto &a = veca[j]; \
            VMTYPEEQ(a, V_T) \
            auto bv = b.field(); \
            if (extras & 1 && bv == 0) Div0(); \
            a = Value(a.field() op bv); \
        } \
    } \
}
#define STCOMPEN(op, init, andor) { \
    VM_POPN(len); \
    auto vecb = VM_TOPPTR(); \
    VM_POPN(len); \
    auto veca = VM_TOPPTR(); \
    auto all = init; \
    for (int j = 0; j < len; j++) { \
        all = all andor veca[j].any() op vecb[j].any(); \
    } \
    VM_PUSH(all); \
    VM_RET; \
}

#define _IVOP(op, extras, withscalar, geta) _VOP(op, extras, V_INT, ival, withscalar, geta)
#define _FVOP(op, extras, withscalar, geta) _VOP(op, extras, V_FLOAT, fval, withscalar, geta)

#define _SOP(op) Value res = *a.sval() op *b.sval()
#define _SCAT() Value res = NewString(a.sval()->strv(), b.sval()->strv())

#define ACOMPEN(op) { GETARGS(); Value res = a.any() op b.any(); VM_PUSH(res); VM_RET; }
#define IOP(op, extras) { GETARGS(); _IOP(op, extras); VM_PUSH(res); VM_RET; }
#define FOP(op, extras) { GETARGS(); _FOP(op, extras); VM_PUSH(res); VM_RET; }

#define LOP(op) { GETARGS(); auto res = a.ip() op b.ip(); VM_PUSH(res); VM_RET; }

#define IVVOP(op, extras) { _IVOP(op, extras, false, _GETA()); VM_RET; }
#define FVVOP(op, extras) { _FVOP(op, extras, false, _GETA()); VM_RET; }
#define IVSOP(op, extras) { _IVOP(op, extras, true, _GETA()); VM_RET; }
#define FVSOP(op, extras) { _FVOP(op, extras, true, _GETA()); VM_RET; }

#define SOP(op) { GETARGS(); _SOP(op); VM_PUSH(res); VM_RET; }
#define SCAT() { GETARGS(); _SCAT(); VM_PUSH(res); VM_RET; }

// +  += I F Vif S
// -  -= I F Vif
// *  *= I F Vif
// /  /= I F Vif
// %  %= I   Vi

// <  I F Vif S
// >  I F Vif S
// <= I F Vif S
// >= I F Vif S
// == I F V   S   // FIXME differentiate struct / value / vector
// != I F V   S

// U- I F Vif
// U! A

VM_INS_RET VM::U_IVVADD(int len) { IVVOP(+, 0); }
VM_INS_RET VM::U_IVVSUB(int len) { IVVOP(-, 0); }
VM_INS_RET VM::U_IVVMUL(int len) { IVVOP(*, 0); }
VM_INS_RET VM::U_IVVDIV(int len) { IVVOP(/, 1); }
VM_INS_RET VM::U_IVVMOD(int) { VMASSERT(0); VM_RET; }
VM_INS_RET VM::U_IVVLT(int len) { IVVOP(<, 0); }
VM_INS_RET VM::U_IVVGT(int len) { IVVOP(>, 0); }
VM_INS_RET VM::U_IVVLE(int len) { IVVOP(<=, 0); }
VM_INS_RET VM::U_IVVGE(int len) { IVVOP(>=, 0); }
VM_INS_RET VM::U_FVVADD(int len) { FVVOP(+, 0); }
VM_INS_RET VM::U_FVVSUB(int len) { FVVOP(-, 0); }
VM_INS_RET VM::U_FVVMUL(int len) { FVVOP(*, 0); }
VM_INS_RET VM::U_FVVDIV(int len) { FVVOP(/, 1); }
VM_INS_RET VM::U_FVVMOD(int) { VMASSERT(0); VM_RET; }
VM_INS_RET VM::U_FVVLT(int len) { FVVOP(<, 0); }
VM_INS_RET VM::U_FVVGT(int len) { FVVOP(>, 0); }
VM_INS_RET VM::U_FVVLE(int len) { FVVOP(<=, 0); }
VM_INS_RET VM::U_FVVGE(int len) { FVVOP(>=, 0); }

VM_INS_RET VM::U_IVSADD(int len) { IVSOP(+, 0); }
VM_INS_RET VM::U_IVSSUB(int len) { IVSOP(-, 0); }
VM_INS_RET VM::U_IVSMUL(int len) { IVSOP(*, 0); }
VM_INS_RET VM::U_IVSDIV(int len) { IVSOP(/, 1); }
VM_INS_RET VM::U_IVSMOD(int) { VMASSERT(0); VM_RET; }
VM_INS_RET VM::U_IVSLT(int len) { IVSOP(<, 0); }
VM_INS_RET VM::U_IVSGT(int len) { IVSOP(>, 0); }
VM_INS_RET VM::U_IVSLE(int len) { IVSOP(<=, 0); }
VM_INS_RET VM::U_IVSGE(int len) { IVSOP(>=, 0); }
VM_INS_RET VM::U_FVSADD(int len) { FVSOP(+, 0); }
VM_INS_RET VM::U_FVSSUB(int len) { FVSOP(-, 0); }
VM_INS_RET VM::U_FVSMUL(int len) { FVSOP(*, 0); }
VM_INS_RET VM::U_FVSDIV(int len) { FVSOP(/, 1); }
VM_INS_RET VM::U_FVSMOD(int) { VMASSERT(0); VM_RET; }
VM_INS_RET VM::U_FVSLT(int len) { FVSOP(<, 0); }
VM_INS_RET VM::U_FVSGT(int len) { FVSOP(>, 0); }
VM_INS_RET VM::U_FVSLE(int len) { FVSOP(<=, 0); }
VM_INS_RET VM::U_FVSGE(int len) { FVSOP(>=, 0); }

VM_INS_RET VM::U_AEQ() { ACOMPEN(==); }
VM_INS_RET VM::U_ANE() { ACOMPEN(!=); }
VM_INS_RET VM::U_STEQ(int len) { STCOMPEN(==, true, &&); }
VM_INS_RET VM::U_STNE(int len) { STCOMPEN(!=, false, ||); }
VM_INS_RET VM::U_LEQ() { LOP(==); }
VM_INS_RET VM::U_LNE() { LOP(!=); }

VM_INS_RET VM::U_IADD() { IOP(+, 0); }
VM_INS_RET VM::U_ISUB() { IOP(-, 0); }
VM_INS_RET VM::U_IMUL() { IOP(*, 0); }
VM_INS_RET VM::U_IDIV() { IOP(/, 1); }
VM_INS_RET VM::U_IMOD() { IOP(%, 1); }
VM_INS_RET VM::U_ILT() { IOP(<, 0); }
VM_INS_RET VM::U_IGT() { IOP(>, 0); }
VM_INS_RET VM::U_ILE() { IOP(<=, 0); }
VM_INS_RET VM::U_IGE() { IOP(>=, 0); }
VM_INS_RET VM::U_IEQ() { IOP(==, 0); }
VM_INS_RET VM::U_INE() { IOP(!=, 0); }

VM_INS_RET VM::U_FADD() { FOP(+, 0); }
VM_INS_RET VM::U_FSUB() { FOP(-, 0); }
VM_INS_RET VM::U_FMUL() { FOP(*, 0); }
VM_INS_RET VM::U_FDIV() { FOP(/, 1); }
VM_INS_RET VM::U_FMOD() { VMASSERT(0); VM_RET; }
VM_INS_RET VM::U_FLT() { FOP(<, 0); }
VM_INS_RET VM::U_FGT() { FOP(>, 0); }
VM_INS_RET VM::U_FLE() { FOP(<=, 0); }
VM_INS_RET VM::U_FGE() { FOP(>=, 0); }
VM_INS_RET VM::U_FEQ() { FOP(==, 0); }
VM_INS_RET VM::U_FNE() { FOP(!=, 0); }

VM_INS_RET VM::U_SADD() { SCAT(); }
VM_INS_RET VM::U_SSUB() { VMASSERT(0); VM_RET; }
VM_INS_RET VM::U_SMUL() { VMASSERT(0); VM_RET; }
VM_INS_RET VM::U_SDIV() { VMASSERT(0); VM_RET; }
VM_INS_RET VM::U_SMOD() { VMASSERT(0); VM_RET; }
VM_INS_RET VM::U_SLT() { SOP(<); }
VM_INS_RET VM::U_SGT() { SOP(>); }
VM_INS_RET VM::U_SLE() { SOP(<=); }
VM_INS_RET VM::U_SGE() { SOP(>=); }
VM_INS_RET VM::U_SEQ() { SOP(==); }
VM_INS_RET VM::U_SNE() { SOP(!=); }

VM_INS_RET VM::U_IUMINUS() { Value a = VM_POP(); VM_PUSH(Value(-a.ival())); VM_RET; }
VM_INS_RET VM::U_FUMINUS() { Value a = VM_POP(); VM_PUSH(Value(-a.fval())); VM_RET; }

VM_INS_RET VM::U_IVUMINUS(int len) {
    auto vec = VM_TOPPTR() - len;
    for (int i = 0; i < len; i++) {
        auto &a = vec[i];
        VMTYPEEQ(a, V_INT);
        a = -a.ival();
    }
    VM_RET;
}
VM_INS_RET VM::U_FVUMINUS(int len) {
    auto vec = VM_TOPPTR() - len;
    for (int i = 0; i < len; i++) {
        auto &a = vec[i];
        VMTYPEEQ(a, V_FLOAT);
        a = -a.fval();
    }
    VM_RET;
}

VM_INS_RET VM::U_LOGNOT() {
    Value a = VM_POP();
    VM_PUSH(!a.True());
    VM_RET;
}
VM_INS_RET VM::U_LOGNOTREF() {
    Value a = VM_POP();
    bool b = a.True();
    VM_PUSH(!b);
    VM_RET;
}

#define BITOP(op) { GETARGS(); VM_PUSH(a.ival() op b.ival()); VM_RET; }
VM_INS_RET VM::U_BINAND() { BITOP(&); }
VM_INS_RET VM::U_BINOR() { BITOP(|); }
VM_INS_RET VM::U_XOR() { BITOP(^); }
VM_INS_RET VM::U_ASL() { BITOP(<<); }
VM_INS_RET VM::U_ASR() { BITOP(>>); }
VM_INS_RET VM::U_NEG() { auto a = VM_POP(); VM_PUSH(~a.ival()); VM_RET; }

VM_INS_RET VM::U_I2F() {
    Value a = VM_POP();
    VMTYPEEQ(a, V_INT);
    VM_PUSH((float)a.ival());
    VM_RET;
}

VM_INS_RET VM::U_A2S(int ty) {
    Value a = VM_POP();
    VM_PUSH(ToString(a, GetTypeInfo((type_elem_t)ty)));
    VM_RET;
}

VM_INS_RET VM::U_ST2S(int ty) {
    auto &ti = GetTypeInfo((type_elem_t)ty);
    VM_POPN(ti.len);
    auto top = VM_TOPPTR();
    VM_PUSH(StructToString(top, ti));
    VM_RET;
}

VM_INS_RET VM::U_E2B() {
    Value a = VM_POP();
    VM_PUSH(a.True());
    VM_RET;
}

VM_INS_RET VM::U_E2BREF() {
    Value a = VM_POP();
    VM_PUSH(a.True());
    VM_RET;
}

VM_INS_RET VM::U_PUSHVAR(int vidx) {
    VM_PUSH(vars[vidx]);
    VM_RET;
}

VM_INS_RET VM::U_PUSHVARV(int vidx, int l) {
    tsnz_memcpy(VM_TOPPTR(), &vars[vidx], l);
    VM_PUSHN(l);
    VM_RET;
}

VM_INS_RET VM::U_PUSHFLD(int i) {
    Value r = VM_POP();
    VMASSERT(r.ref());
    assert(i < r.oval()->Len(*this));
    VM_PUSH(r.oval()->AtS(i));
    VM_RET;
}
VM_INS_RET VM::U_PUSHFLDMREF(int i) {
    Value r = VM_POP();
    if (!r.ref()) {
        VM_PUSH(r);
    } else {
        assert(i < r.oval()->Len(*this));
        VM_PUSH(r.oval()->AtS(i));
    }
    VM_RET;
}
VM_INS_RET VM::U_PUSHFLD2V(int i, int l) {
    Value r = VM_POP();
    VMASSERT(r.ref());
    assert(i + l <= r.oval()->Len(*this));
    tsnz_memcpy(VM_TOPPTR(), &r.oval()->AtS(i), l);
    VM_PUSHN(l);
    VM_RET;
}
VM_INS_RET VM::U_PUSHFLDV(int i, int l) {
    VM_POPN(l);
    auto val = *(VM_TOPPTR() + i);
    VM_PUSH(val);
    VM_RET;
}
VM_INS_RET VM::U_PUSHFLDV2V(int i, int rl, int l) {
    VM_POPN(l);
    t_memmove(VM_TOPPTR(), VM_TOPPTR() + i, rl);
    VM_PUSHN(rl);
    VM_RET;
}

VM_INS_RET VM::U_VPUSHIDXI() { PushDerefIdxVector(VM_POP().ival()); VM_RET; }
VM_INS_RET VM::U_VPUSHIDXV(int l) { PushDerefIdxVector(GrabIndex(l)); VM_RET; }
VM_INS_RET VM::U_VPUSHIDXIS(int w, int o) { PushDerefIdxVectorSub(VM_POP().ival(), w, o); VM_RET; }
VM_INS_RET VM::U_VPUSHIDXVS(int l, int w, int o) { PushDerefIdxVectorSub(GrabIndex(l), w, o); VM_RET; }
VM_INS_RET VM::U_NPUSHIDXI(int l) { PushDerefIdxStruct(VM_POP().ival(), l); VM_RET; }
VM_INS_RET VM::U_SPUSHIDXI() { PushDerefIdxString(VM_POP().ival()); VM_RET; }

VM_INS_RET VM::U_PUSHLOC(int i) {
    auto coro = VM_POP().cval();
    VM_PUSH(coro->GetVar(*this, i));
    VM_RET;
}

VM_INS_RET VM::U_PUSHLOCV(int i, int l) {
    auto coro = VM_POP().cval();
    tsnz_memcpy(VM_TOPPTR(), &coro->GetVar(*this, i), l);
    VM_PUSHN(l);
    VM_RET;
}

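// Conditional jumps: in compiled-code mode the handler only returns whether the branch
// is taken (the generated native code performs the actual jump); in interpreted mode it
// reads the target from the bytecode and updates ip itself.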
#ifdef VM_COMPILED_CODE_MODE
#define GJUMP(N, V, C, P) VM_JMP_RET VM::U_##N() \
    { V; if (C) { P; return true; } else { return false; } }
#else
#define GJUMP(N, V, C, P) VM_JMP_RET VM::U_##N() \
    { V; auto nip = *ip++; if (C) { ip = codestart + nip; P; } VM_RET; }
#endif

GJUMP(JUMP       ,                  , true     ,                 )
GJUMP(JUMPFAIL   , auto x = VM_POP(), !x.True(),                 )
GJUMP(JUMPFAILR  , auto x = VM_POP(), !x.True(), VM_PUSH(x)      )
GJUMP(JUMPFAILN  , auto x = VM_POP(), !x.True(), VM_PUSH(Value()))
GJUMP(JUMPNOFAIL , auto x = VM_POP(), x.True() ,                 )
GJUMP(JUMPNOFAILR, auto x = VM_POP(), x.True() , VM_PUSH(x)      )

VM_INS_RET VM::U_ISTYPE(int ty) {
    auto to = (type_elem_t)ty;
    auto v = VM_POP();
    // Optimizer guarantees we don't have to deal with scalars.
    if (v.refnil()) VM_PUSH(v.ref()->tti == to);
    else VM_PUSH(GetTypeInfo(to).t == V_NIL);  // FIXME: can replace by fixed type_elem_t ?
    VM_RET;
}

VM_INS_RET VM::U_YIELD(VM_OP_ARGS_CALL) { CoYield(VM_FC_PASS_THRU); VM_RET; }

// This value never gets used anywhere, just a placeholder.
VM_INS_RET VM::U_COCL() { VM_PUSH(Value(0, V_YIELD)); VM_RET; }

VM_INS_RET VM::U_CORO(VM_OP_ARGS VM_COMMA VM_OP_ARGS_CALL) { CoNew(VM_IP_PASS_THRU VM_COMMA VM_FC_PASS_THRU); VM_RET; }

VM_INS_RET VM::U_COEND() { CoClean(); VM_RET; }

VM_INS_RET VM::U_LOGREAD(int vidx) {
    auto val = VM_POP();
    VM_PUSH(vml.LogGet(val, vidx));
    VM_RET;
}

VM_INS_RET VM::U_LOGWRITE(int vidx, int lidx) {
    vml.LogWrite(vars[vidx], lidx);
    VM_RET;
}

VM_INS_RET VM::U_ABORT() {
    Error("VM internal error: abort");
    VM_RET;
}

void VM::IDXErr(intp i, intp n, const RefObj *v) {
    Error(cat("index ", i, " out of range ", n), v);
}
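// The unsigned compare below also catches negative indices in a single test.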
#define RANGECHECK(I, BOUND, VEC) if ((uintp)I >= (uintp)BOUND) IDXErr(I, BOUND, VEC);

void VM::PushDerefIdxVector(intp i) {
    Value r = VM_POP();
    VMASSERT(r.ref());
    auto v = r.vval();
    RANGECHECK(i, v->len, v);
    v->AtVW(*this, i);
}

void VM::PushDerefIdxVectorSub(intp i, int width, int offset) {
    Value r = VM_POP();
    VMASSERT(r.ref());
    auto v = r.vval();
    RANGECHECK(i, v->len, v);
    v->AtVWSub(*this, i, width, offset);
}

void VM::PushDerefIdxStruct(intp i, int l) {
    VM_POPN(l);
    auto val = *(VM_TOPPTR() + i);
    VM_PUSH(val);
}

void VM::PushDerefIdxString(intp i) {
    Value r = VM_POP();
    VMASSERT(r.ref());
    // Allow access of the terminating 0-byte.
    RANGECHECK(i, r.sval()->len + 1, r.sval());
    VM_PUSH(Value((int)((uchar *)r.sval()->data())[i]));
}

Value &VM::GetFieldLVal(intp i) {
    Value vec = VM_POP();
    #ifndef NDEBUG
        RANGECHECK(i, vec.oval()->Len(*this), vec.oval());
    #endif
    return vec.oval()->AtS(i);
}

Value &VM::GetFieldILVal(intp i) {
    Value vec = VM_POP();
    RANGECHECK(i, vec.oval()->Len(*this), vec.oval());
    return vec.oval()->AtS(i);
}

Value &VM::GetVecLVal(intp i) {
    Value vec = VM_POP();
    auto v = vec.vval();
    RANGECHECK(i, v->len, v);
    return *v->AtSt(i);
}

Value &VM::GetLocLVal(int i) {
    Value coro = VM_POP();
    VMTYPEEQ(coro, V_COROUTINE);
    return coro.cval()->GetVar(*this, i);
}

#pragma push_macro("LVAL")
#undef LVAL

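// Expand LVALOPNAMES once per addressing mode: variables (VAR), object fields (FLD),
// coroutine locals (LOC), and vector elements indexed by an int or a multi-dimensional
// index (IDXVI / IDXVV / IDXNI). Each generated opcode forwards to the LV_* helpers below.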
1506 #define LVAL(N, V) VM_INS_RET VM::U_VAR_##N(int vidx VM_COMMA_IF(V) VM_OP_ARGSN(V)) \
1507 { LV_##N(vars[vidx] VM_COMMA_IF(V) VM_OP_PASSN(V)); VM_RET; }
1508 LVALOPNAMES
1509 #undef LVAL
1510
1511 #define LVAL(N, V) VM_INS_RET VM::U_FLD_##N(int i VM_COMMA_IF(V) VM_OP_ARGSN(V)) \
1512 { LV_##N(GetFieldLVal(i) VM_COMMA_IF(V) VM_OP_PASSN(V)); VM_RET; }
1513 LVALOPNAMES
1514 #undef LVAL
1515
1516 #define LVAL(N, V) VM_INS_RET VM::U_LOC_##N(int i VM_COMMA_IF(V) VM_OP_ARGSN(V)) \
1517 { LV_##N(GetLocLVal(i) VM_COMMA_IF(V) VM_OP_PASSN(V)); VM_RET; }
1518 LVALOPNAMES
1519 #undef LVAL
1520
1521 #define LVAL(N, V) VM_INS_RET VM::U_IDXVI_##N(VM_OP_ARGSN(V)) \
1522 { LV_##N(GetVecLVal(VM_POP().ival()) VM_COMMA_IF(V) VM_OP_PASSN(V)); VM_RET; }
1523 LVALOPNAMES
1524 #undef LVAL
1525
1526 #define LVAL(N, V) VM_INS_RET VM::U_IDXVV_##N(int l VM_COMMA_IF(V) VM_OP_ARGSN(V)) \
1527 { LV_##N(GetVecLVal(GrabIndex(l)) VM_COMMA_IF(V) VM_OP_PASSN(V)); VM_RET; }
1528 LVALOPNAMES
1529 #undef LVAL
1530
1531 #define LVAL(N, V) VM_INS_RET VM::U_IDXNI_##N(VM_OP_ARGSN(V)) \
1532 { LV_##N(GetFieldILVal(VM_POP().ival()) VM_COMMA_IF(V) VM_OP_PASSN(V)); VM_RET; }
1533 LVALOPNAMES
1534 #undef LVAL
1535
1536 #pragma pop_macro("LVAL")
1537
#define LVALCASES(N, B) void VM::LV_##N(Value &a) { Value b = VM_POP(); B; }
#define LVALCASER(N, B) void VM::LV_##N(Value &fa, int len) { B; }
#define LVALCASESTR(N, B, B2) void VM::LV_##N(Value &a) { Value b = VM_POP(); B; a.LTDECRTNIL(*this); B2; }

LVALCASER(IVVADD , _IVOP(+, 0, false, &fa))
LVALCASER(IVVADDR, _IVOP(+, 0, false, &fa))
LVALCASER(IVVSUB , _IVOP(-, 0, false, &fa))
LVALCASER(IVVSUBR, _IVOP(-, 0, false, &fa))
LVALCASER(IVVMUL , _IVOP(*, 0, false, &fa))
LVALCASER(IVVMULR, _IVOP(*, 0, false, &fa))
LVALCASER(IVVDIV , _IVOP(/, 1, false, &fa))
LVALCASER(IVVDIVR, _IVOP(/, 1, false, &fa))
LVALCASER(IVVMOD , VMASSERT(0); (void)fa; (void)len)
LVALCASER(IVVMODR, VMASSERT(0); (void)fa; (void)len)

LVALCASER(FVVADD , _FVOP(+, 0, false, &fa))
LVALCASER(FVVADDR, _FVOP(+, 0, false, &fa))
LVALCASER(FVVSUB , _FVOP(-, 0, false, &fa))
LVALCASER(FVVSUBR, _FVOP(-, 0, false, &fa))
LVALCASER(FVVMUL , _FVOP(*, 0, false, &fa))
LVALCASER(FVVMULR, _FVOP(*, 0, false, &fa))
LVALCASER(FVVDIV , _FVOP(/, 1, false, &fa))
LVALCASER(FVVDIVR, _FVOP(/, 1, false, &fa))

LVALCASER(IVSADD , _IVOP(+, 0, true, &fa))
LVALCASER(IVSADDR, _IVOP(+, 0, true, &fa))
LVALCASER(IVSSUB , _IVOP(-, 0, true, &fa))
LVALCASER(IVSSUBR, _IVOP(-, 0, true, &fa))
LVALCASER(IVSMUL , _IVOP(*, 0, true, &fa))
LVALCASER(IVSMULR, _IVOP(*, 0, true, &fa))
LVALCASER(IVSDIV , _IVOP(/, 1, true, &fa))
LVALCASER(IVSDIVR, _IVOP(/, 1, true, &fa))
LVALCASER(IVSMOD , VMASSERT(0); (void)fa; (void)len)
LVALCASER(IVSMODR, VMASSERT(0); (void)fa; (void)len)

LVALCASER(FVSADD , _FVOP(+, 0, true, &fa))
LVALCASER(FVSADDR, _FVOP(+, 0, true, &fa))
LVALCASER(FVSSUB , _FVOP(-, 0, true, &fa))
LVALCASER(FVSSUBR, _FVOP(-, 0, true, &fa))
LVALCASER(FVSMUL , _FVOP(*, 0, true, &fa))
LVALCASER(FVSMULR, _FVOP(*, 0, true, &fa))
LVALCASER(FVSDIV , _FVOP(/, 1, true, &fa))
LVALCASER(FVSDIVR, _FVOP(/, 1, true, &fa))

LVALCASES(IADD  , _IOP(+, 0); a = res; )
LVALCASES(IADDR , _IOP(+, 0); a = res; VM_PUSH(res))
LVALCASES(ISUB  , _IOP(-, 0); a = res; )
LVALCASES(ISUBR , _IOP(-, 0); a = res; VM_PUSH(res))
LVALCASES(IMUL  , _IOP(*, 0); a = res; )
LVALCASES(IMULR , _IOP(*, 0); a = res; VM_PUSH(res))
LVALCASES(IDIV  , _IOP(/, 1); a = res; )
LVALCASES(IDIVR , _IOP(/, 1); a = res; VM_PUSH(res))
LVALCASES(IMOD  , _IOP(%, 1); a = res; )
LVALCASES(IMODR , _IOP(%, 1); a = res; VM_PUSH(res))

LVALCASES(FADD  , _FOP(+, 0); a = res; )
LVALCASES(FADDR , _FOP(+, 0); a = res; VM_PUSH(res))
LVALCASES(FSUB  , _FOP(-, 0); a = res; )
LVALCASES(FSUBR , _FOP(-, 0); a = res; VM_PUSH(res))
LVALCASES(FMUL  , _FOP(*, 0); a = res; )
LVALCASES(FMULR , _FOP(*, 0); a = res; VM_PUSH(res))
LVALCASES(FDIV  , _FOP(/, 1); a = res; )
LVALCASES(FDIVR , _FOP(/, 1); a = res; VM_PUSH(res))

LVALCASESTR(SADD , _SCAT(), a = res; )
LVALCASESTR(SADDR, _SCAT(), a = res; VM_PUSH(res))

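// Plain assignment to an lvalue. OVERWRITE_VAR asserts the dynamic type stays
// the same (or either side is nil). The R variants peek instead of popping so
// the assigned value also remains on the stack as the expression result; the
// REF variants release the old reference first.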
#define OVERWRITE_VAR(a, b) { assert(a.type == b.type || a.type == V_NIL || b.type == V_NIL); a = b; }

void VM::LV_WRITE    (Value &a) { auto  b = VM_POP();                      OVERWRITE_VAR(a, b); }
void VM::LV_WRITER   (Value &a) { auto &b = VM_TOP();                      OVERWRITE_VAR(a, b); }
void VM::LV_WRITEREF (Value &a) { auto  b = VM_POP(); a.LTDECRTNIL(*this); OVERWRITE_VAR(a, b); }
void VM::LV_WRITERREF(Value &a) { auto &b = VM_TOP(); a.LTDECRTNIL(*this); OVERWRITE_VAR(a, b); }

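// Assignment of an inline struct of 'l' values: copy them from the top of the
// stack over the target slots, decref'ing the old values first when DECS is
// set. The V variants pop the source values; the RV variants leave them on the
// stack as the expression result.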
#define WRITESTRUCT(DECS) \
    auto b = VM_TOPPTR() - l; \
    if (DECS) for (int i = 0; i < l; i++) (&a)[i].LTDECRTNIL(*this); \
    tsnz_memcpy(&a, b, l);

void VM::LV_WRITEV    (Value &a, int l) { WRITESTRUCT(false); VM_POPN(l); }
void VM::LV_WRITERV   (Value &a, int l) { WRITESTRUCT(false); }
void VM::LV_WRITEREFV (Value &a, int l) { WRITESTRUCT(true); VM_POPN(l); }
void VM::LV_WRITERREFV(Value &a, int l) { WRITESTRUCT(true); }

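// Increment/decrement ops on an lvalue. 'pre' selects pre- vs. post- semantics:
// when a result is wanted (ret), pre ops push the updated value, post ops push
// the value as it was before the update.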
#define PPOP(name, ret, op, pre, accessor) void VM::LV_##name(Value &a) { \
    if (ret && !pre) VM_PUSH(a); \
    a.set##accessor(a.accessor() op 1); \
    if (ret && pre) VM_PUSH(a); \
}

PPOP(IPP  , false, +, true , ival)
PPOP(IPPR , true , +, true , ival)
PPOP(IMM  , false, -, true , ival)
PPOP(IMMR , true , -, true , ival)
PPOP(IPPP , false, +, false, ival)
PPOP(IPPPR, true , +, false, ival)
PPOP(IMMP , false, -, false, ival)
PPOP(IMMPR, true , -, false, ival)
PPOP(FPP  , false, +, true , fval)
PPOP(FPPR , true , +, true , fval)
PPOP(FMM  , false, -, true , fval)
PPOP(FMMR , true , -, true , fval)
PPOP(FPPP , false, +, false, fval)
PPOP(FPPPR, true , +, false, fval)
PPOP(FMMP , false, -, false, fval)
PPOP(FMMPR, true , -, false, fval)

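// Builds the user-facing name of a type, recursing for nil ("T?") and vector
// ("[T]") wrappers, and mapping an int back to its enum name where applicable.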
string VM::ProperTypeName(const TypeInfo &ti) {
    switch (ti.t) {
        case V_STRUCT_R:
        case V_STRUCT_S:
        case V_CLASS: return string(ReverseLookupType(ti.structidx));
        case V_NIL: return ProperTypeName(GetTypeInfo(ti.subt)) + "?";
        case V_VECTOR: return "[" + ProperTypeName(GetTypeInfo(ti.subt)) + "]";
        case V_INT: return ti.enumidx >= 0 ? string(EnumName(ti.enumidx)) : "int";
        default: return string(BaseTypeName(ti.t));
    }
}

void VM::BCallProf() {
    #ifdef VM_PROFILER
    vm_count_bcalls++;
    #endif
}

void VM::BCallRetCheck(const NativeFun *nf) {
    #if RTT_ENABLED
    // See if any builtin function is lying about what type it returns
    // other function types return intermediary values that don't correspond to final return
    // values.
    if (!nf->cont1) {
        for (size_t i = 0; i < nf->retvals.v.size(); i++) {
            auto t = (VM_TOPPTR() - nf->retvals.v.size() + i)->type;
            auto u = nf->retvals.v[i].type->t;
            assert(t == u || u == V_ANY || u == V_NIL || (u == V_VECTOR && IsUDT(t)));
        }
        assert(nf->retvals.v.size() || VM_TOP().type == V_NIL);
    }
    #else
    (void)nf;
    #endif
}

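// Resolves a multi-dimensional index: pops 'len' int indices, using all but one
// of them (range-checked) to descend into nested vectors held in the stack slot
// below, and returns the remaining index for the caller to apply.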
intp VM::GrabIndex(int len) {
    auto &v = VM_TOPM(len);
    for (len--; ; len--) {
        auto sidx = VM_POP().ival();
        if (!len) return sidx;
        RANGECHECK(sidx, v.vval()->len, v.vval());
        v = v.vval()->At(sidx);
    }
}

string_view VM::StructName(const TypeInfo &ti) {
    return bcf->udts()->Get(ti.structidx)->name()->string_view();
}

string_view VM::ReverseLookupType(uint v) {
    return bcf->udts()->Get(v)->name()->string_view();
}

string_view VM::EnumName(intp val, int enumidx) {
    auto &vals = *bcf->enums()->Get(enumidx)->vals();
    // FIXME: can store a bool that says whether this enum is contiguous, so we just index instead.
    for (auto v : vals)
        if (v->val() == val)
            return v->name()->string_view();
    return {};
}

string_view VM::EnumName(int enumidx) {
    return bcf->enums()->Get(enumidx)->name()->string_view();
}

optional<int64_t> VM::LookupEnum(string_view name, int enumidx) {
    auto &vals = *bcf->enums()->Get(enumidx)->vals();
    for (auto v : vals)
        if (v->name()->string_view() == name)
            return v->val();
    return {};
}

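// Worker threads: each worker runs the same program in its own fully
// independent VM instance; the only shared mutable state is the TupleSpace
// used by WorkerWrite/WorkerRead below.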
void VM::StartWorkers(size_t numthreads) {
    if (is_worker) Error("workers can\'t start more worker threads");
    if (tuple_space) Error("workers already running");
    // Stop bad values from locking up the machine :)
    numthreads = min(numthreads, (size_t)256);
    tuple_space = new TupleSpace(bcf->udts()->size());
    for (size_t i = 0; i < numthreads; i++) {
        // Create a new VM that should own all its own memory and be completely independent
        // from this one.
        // We share nfr and programname for now since they're fully read-only.
        // FIXME: have to copy bytecode buffer even though it is read-only.
        auto vmargs = *(VMArgs *)this;
        vmargs.program_args.resize(0);
        vmargs.trace = TraceMode::OFF;
        auto wvm = new VM(std::move(vmargs));
        wvm->is_worker = true;
        wvm->tuple_space = tuple_space;
        workers.emplace_back([wvm] {
            string err;
            #ifdef USE_EXCEPTION_HANDLING
            try
            #endif
            {
                wvm->EvalProgram();
            }
            #ifdef USE_EXCEPTION_HANDLING
            catch (string &s) {
                if (s != "end-eval") err = s;
            }
            #endif
            delete wvm;
            // FIXME: instead return err to main thread?
            if (!err.empty()) LOG_ERROR("worker error: ", err);
        });
    }
}

void VM::TerminateWorkers() {
    if (is_worker || !tuple_space) return;
    tuple_space->alive = false;
    for (auto &tt : tuple_space->tupletypes) tt.condition.notify_all();
    for (auto &worker : workers) worker.join();
    workers.clear();
    delete tuple_space;
    tuple_space = nullptr;
}

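// Publish a copy of a class instance into the tuple space: its (scalar-only,
// for now) field values are copied into a fresh buffer, queued under the
// class's type, and one waiting reader is woken.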
void VM::WorkerWrite(RefObj *ref) {
    if (!tuple_space) return;
    if (!ref) Error("thread write: nil reference");
    auto &ti = ref->ti(*this);
    if (ti.t != V_CLASS) Error("thread write: must be a class");
    auto st = (LObject *)ref;
    auto buf = new Value[ti.len];
    for (int i = 0; i < ti.len; i++) {
        // FIXME: lift this restriction.
        if (IsRefNil(GetTypeInfo(ti.elemtypes[i]).t))
            Error("thread write: only scalar class members supported for now");
        buf[i] = st->AtS(i);
    }
    auto &tt = tuple_space->tupletypes[ti.structidx];
    {
        unique_lock<mutex> lock(tt.mtx);
        tt.tuples.push_back(buf);
    }
    tt.condition.notify_one();
}

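// Blocking counterpart of WorkerWrite: waits until a tuple of the requested
// class type is available (or the tuple space is shut down, in which case it
// returns nullptr) and materializes it as a fresh object owned by this VM.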
LObject *VM::WorkerRead(type_elem_t tti) {
    auto &ti = GetTypeInfo(tti);
    if (ti.t != V_CLASS) Error("thread read: must be a class type");
    Value *buf = nullptr;
    auto &tt = tuple_space->tupletypes[ti.structidx];
    {
        unique_lock<mutex> lock(tt.mtx);
        tt.condition.wait(lock, [&] { return !tuple_space->alive || !tt.tuples.empty(); });
        if (!tt.tuples.empty()) {
            buf = tt.tuples.front();
            tt.tuples.pop_front();
        }
    }
    if (!buf) return nullptr;
    auto ns = NewObject(ti.len, tti);
    ns->Init(*this, buf, ti.len, false);
    delete[] buf;
    return ns;
}

}  // namespace lobster


// Make VM ops available as C functions for linking purposes:

#ifdef VM_COMPILED_CODE_MODE

extern "C" {

using namespace lobster;

#ifndef NDEBUG
#define CHECKI(B) \
    if (vm->trace != TraceMode::OFF) { \
        auto &ss = vm->TraceStream(); \
        ss << B; \
        if (vm->trace == TraceMode::TAIL) ss << '\n'; else LOG_PROGRAM(ss.str()); \
    }
// FIXME: add spaces.
#define CHECK(N, A) CHECKI(#N << cat A)
#define CHECKJ(N) CHECKI(#N)
#else
#define CHECK(N, A)
#define CHECKJ(N)
#endif


void CVM_SetNextCallTarget(VM *vm, block_t fcont) {
    vm->next_call_target = fcont;
}

block_t CVM_GetNextCallTarget(VM *vm) {
    return vm->next_call_target;
}

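// One extern "C" wrapper per VM opcode, so compiled-to-C++ Lobster code can
// call them directly: lvalue and base ops simply forward their arguments, call
// ops additionally take the continuation 'fcont' to run after the call, and
// jump ops return a bool the generated code uses to decide whether to branch.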
#define F(N, A) \
    void CVM_##N(VM *vm VM_COMMA_IF(A) VM_OP_ARGSN(A)) { \
        CHECK(N, (VM_OP_PASSN(A))); vm->U_##N(VM_OP_PASSN(A)); }
LVALOPNAMES
#undef F
#define F(N, A) \
    void CVM_##N(VM *vm VM_COMMA_IF(A) VM_OP_ARGSN(A)) { \
        CHECK(N, (VM_OP_PASSN(A))); vm->U_##N(VM_OP_PASSN(A)); }
ILBASENAMES
#undef F
#define F(N, A) \
    void CVM_##N(VM *vm VM_COMMA_IF(A) VM_OP_ARGSN(A), block_t fcont) { \
        CHECK(N, (VM_OP_PASSN(A))); vm->U_##N(VM_OP_PASSN(A) VM_COMMA_IF(A) fcont); }
ILCALLNAMES
#undef F
#define F(N, A) \
    bool CVM_##N(VM *vm) { \
        CHECKJ(N); return vm->U_##N(); }
ILJUMPNAMES
#undef F

}  // extern "C"

#endif  // VM_COMPILED_CODE_MODE