1 // Copyright 2014 Wouter van Oortmerssen. All rights reserved.
2 //
3 // Licensed under the Apache License, Version 2.0 (the "License");
4 // you may not use this file except in compliance with the License.
5 // You may obtain a copy of the License at
6 //
7 //     http://www.apache.org/licenses/LICENSE-2.0
8 //
9 // Unless required by applicable law or agreed to in writing, software
10 // distributed under the License is distributed on an "AS IS" BASIS,
11 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 // See the License for the specific language governing permissions and
13 // limitations under the License.
14 
15 namespace lobster {
16 
17 #define FLATBUFFERS_DEBUG_VERIFICATION_FAILURE
18 #include "lobster/bytecode_generated.h"
19 
// Generates VM bytecode from the typechecked AST, together with all the
// side tables the VM needs: a runtime type table, line info, specialized
// ident info, string table and vtables.
struct CodeGen  {
    vector<int> code;                      // The bytecode words emitted so far.
    vector<uchar> code_attr;               // Per-word attribute bits (bytecode::Attr_*), parallel to code.
    vector<bytecode::LineInfo> lineinfo;   // One entry per change of source line/file during emission.
    vector<bytecode::SpecIdent> sids;      // Runtime specialized-ident table, one entry per value slot.
    Parser &parser;
    vector<const Node *> linenumbernodes;  // Stack of nodes supplying line info for emitted code.
    vector<tuple<int, const SubFunction *>> call_fixups;  // Call operands to patch once targets are known.
    SymbolTable &st;
    vector<type_elem_t> type_table, vint_typeoffsets, vfloat_typeoffsets;
    map<vector<type_elem_t>, type_elem_t> type_lookup;  // Wasteful, but simple.
    vector<TypeLT> rettypes, temptypestack;  // Types/lifetimes of values currently on the VM stack.
    size_t nested_fors = 0;
    vector<string_view> stringtable;  // sized strings.
    vector<const Node *> node_context;  // Stack of nodes currently being generated (for errors/asserts).
    vector<int> speclogvars;  // Index into specidents.
    int keepvars = 0;         // Count of "keep" slots needed by the function currently being generated.
    int runtime_checks;       // RUNTIME_* level: controls emission of checking instructions.
    vector<int> vtables;      // Flattened vtables for all UDTs, filled with code offsets at the end.

    // Address the next emitted instruction word will get.
    int Pos() { return (int)code.size(); }

    // Ensure code_attr is at least mins entries, padding with Attr_NONE.
    void GrowCodeAttr(size_t mins) {
        while (mins > code_attr.size()) code_attr.push_back(bytecode::Attr_NONE);
    }

    // Append one bytecode word, recording a LineInfo entry whenever the
    // current source line or file differs from the last recorded one.
    void Emit(int i) {
        auto &ln = linenumbernodes.back()->line;
        if (lineinfo.empty() || ln.line != lineinfo.back().line() ||
            ln.fileidx != lineinfo.back().fileidx())
            lineinfo.push_back(bytecode::LineInfo(ln.line, ln.fileidx, Pos()));
        code.push_back(i);
        GrowCodeAttr(code.size());
    }

    // Mark position `at` as the start of a new basic block (a jump target).
    void SplitAttr(int at) {
        GrowCodeAttr(at + 1);
        code_attr[at] |= bytecode::Attr_SPLIT;
    }

    void Emit(int i, int j) { Emit(i); Emit(j); }
    void Emit(int i, int j, int k) { Emit(i); Emit(j); Emit(k); }
    void Emit(int i, int j, int k, int l) { Emit(i); Emit(j); Emit(k); Emit(l); }

    // Patch the operand of a previously emitted jump (the word just before
    // jumploc) to point at the current position, and mark it a block start.
    void SetLabel(int jumploc) {
        code[jumploc - 1] = Pos();
        SplitAttr(Pos());
    }

    // Type table layout for a UDT: 4 header words, then 2 words per field
    // (type offset column first, parent column second).
    const size_t ti_num_udt_fields = 4;
    const size_t ti_num_udt_per_field = 2;

    // Append type info for all fields of udt to tt, recursively flattening
    // inline struct members. `parent` is the enclosing struct's type offset,
    // or -1 for top-level fields.
    void PushFields(UDT *udt, vector<type_elem_t> &tt, type_elem_t parent = (type_elem_t)-1) {
        for (auto &field : udt->fields.v) {
            auto ti = GetTypeTableOffset(field.type);
            if (IsStruct(field.type->t)) {
                // Inline struct: flatten its fields into this entry.
                PushFields(field.type->udt, tt, ti);
            } else {
              // Insert the type offset right after the header plus the type
              // column entries added so far; parents accumulate at the end.
              tt.insert(tt.begin() + (tt.size() - ti_num_udt_fields) / ti_num_udt_per_field +
                            ti_num_udt_fields,
                        ti);
                tt.push_back(parent);
            }
        }
    }

    // Make a table for use as VM runtime type.
    // Returns the offset of this type's entry in type_table, deduplicating
    // identical entries via type_lookup (except UDTs/coroutines, which cache
    // their offset on the type itself).
    type_elem_t GetTypeTableOffset(TypeRef type) {
        vector<type_elem_t> tt;
        tt.push_back((type_elem_t)type->t);
        switch (type->t) {
            case V_INT:
                // Second word: enum idx, or -1 for a plain int.
                tt.push_back((type_elem_t)(type->e ? type->e->idx : -1));
                break;
            case V_NIL:
            case V_VECTOR:
                tt.push_back(GetTypeTableOffset(type->sub));
                break;
            case V_FUNCTION:
                tt.push_back((type_elem_t)type->sf->idx);
                break;
            case V_COROUTINE:
                if (type->sf) {
                    if (type->sf->cotypeinfo >= 0)
                        return type->sf->cotypeinfo;
                    type->sf->cotypeinfo = (type_elem_t)type_table.size();
                    // Reserve space, so other types can be added afterwards safely.
                    type_table.insert(type_table.end(), 3, (type_elem_t)0);
                    tt.push_back((type_elem_t)type->sf->idx);
                    tt.push_back(GetTypeTableOffset(type->sf->returntype));
                    // Fill in the reserved slots now that recursion is done.
                    std::copy(tt.begin(), tt.end(), type_table.begin() + type->sf->cotypeinfo);
                    return type->sf->cotypeinfo;
                } else {
                    tt.push_back((type_elem_t)-1);
                    tt.push_back(TYPE_ELEM_ANY);
                }
                break;
            case V_CLASS:
            case V_STRUCT_R:
            case V_STRUCT_S: {
                if (type->udt->typeinfo >= 0)
                    return type->udt->typeinfo;
                type->udt->typeinfo = (type_elem_t)type_table.size();
                // Reserve space, so other types can be added afterwards safely.
                assert(type->udt->numslots >= 0);
                auto ttsize =
                    (size_t)(type->udt->numslots * ti_num_udt_per_field) + ti_num_udt_fields;
                type_table.insert(type_table.end(), ttsize, (type_elem_t)0);
                tt.push_back((type_elem_t)type->udt->idx);
                tt.push_back((type_elem_t)type->udt->numslots);
                tt.push_back((type_elem_t)type->udt->vtable_start);
                PushFields(type->udt, tt);
                assert(tt.size() == ttsize);
                // Copy into the reserved slots (recursion may have grown type_table).
                std::copy(tt.begin(), tt.end(), type_table.begin() + type->udt->typeinfo);
                return type->udt->typeinfo;
            }
            case V_VAR:
                // This can happen with an empty [] vector that was never bound to anything.
                // Should be benign to use any, since it is never accessed anywhere.
                // FIXME: would be even better to check this case before codegen, since this may
                // mask bugs.
                return GetTypeTableOffset(type_any);
            default:
                assert(IsRuntime(type->t));
                break;
        }
        // For everything that's not a struct / known coroutine:
        auto it = type_lookup.find(tt);
        if (it != type_lookup.end()) return it->second;
        auto offset = (type_elem_t)type_table.size();
        type_lookup[tt] = offset;
        type_table.insert(type_table.end(), tt.begin(), tt.end());
        return offset;
    }

    // Drives the entire code generation: sets up fixed tables, generates all
    // function bodies, the root expression, and then patches call fixups and
    // vtable entries.
    CodeGen(Parser &_p, SymbolTable &_st, bool return_value, int runtime_checks)
        : parser(_p), st(_st), runtime_checks(runtime_checks) {
        // Reserve space and index for all vtables.
        for (auto udt : st.udttable) {
            udt->vtable_start = (int)vtables.size();
            vtables.insert(vtables.end(), udt->dispatch.size(), -1);
        }
        // Pre-load some types into the table, must correspond to order of type_elem_t enums.
                                                            GetTypeTableOffset(type_int);
                                                            GetTypeTableOffset(type_float);
                                                            GetTypeTableOffset(type_string);
                                                            GetTypeTableOffset(type_resource);
                                                            GetTypeTableOffset(type_any);
        Type type_valuebuf(V_VALUEBUF);                     GetTypeTableOffset(&type_valuebuf);
        Type type_stackframebuf(V_STACKFRAMEBUF);           GetTypeTableOffset(&type_stackframebuf);
                                                            GetTypeTableOffset(type_vector_int);
                                                            GetTypeTableOffset(type_vector_float);
        Type type_vec_str(V_VECTOR, &*type_string);         GetTypeTableOffset(&type_vec_str);
        Type type_v_v_int(V_VECTOR, &*type_vector_int);     GetTypeTableOffset(&type_v_v_int);
        Type type_v_v_float(V_VECTOR, &*type_vector_float); GetTypeTableOffset(&type_v_v_float);
        assert(type_table.size() == TYPE_ELEM_FIXED_OFFSET_END);
        // Type offsets for default int/float vector types (-1 where unused).
        for (auto type : st.default_int_vector_types[0])
            vint_typeoffsets.push_back(!type.Null() ? GetTypeTableOffset(type) : (type_elem_t)-1);
        for (auto type : st.default_float_vector_types[0])
            vfloat_typeoffsets.push_back(!type.Null() ? GetTypeTableOffset(type) : (type_elem_t)-1);
        // Assign value-stack slot indices to all specialized idents; a struct
        // ident occupies ValWidth() consecutive slots.
        int sidx = 0;
        for (auto sid : st.specidents) {
            if (!sid->type.Null()) {  // Null ones are in unused functions.
                auto tti = GetTypeTableOffset(sid->type);
                assert(!IsStruct(sid->type->t) || sid->type->udt->numslots >= 0);
                sid->sidx = sidx;
                auto ns = ValWidth(sid->type);
                sidx += ns;
                if (sid->id->logvar) {
                    sid->logvaridx = (int)speclogvars.size();
                    speclogvars.push_back(sid->Idx());
                }
                for (int i = 0; i < ns; i++)
                    sids.push_back(bytecode::SpecIdent(sid->id->idx, tti));
            }
        }
        linenumbernodes.push_back(parser.root);
        SplitAttr(0);
        // Jump over all function definitions to the root code; target patched below.
        Emit(IL_JUMP, 0);
        auto fundefjump = Pos();
        SplitAttr(Pos());
        // Generate all typechecked function bodies.
        for (auto f : parser.st.functiontable) {
            if (f->bytecodestart <= 0 && !f->istype) {
                f->bytecodestart = Pos();
                for (auto sf : f->overloads) for (; sf; sf = sf->next) {
                    if (sf && sf->typechecked) GenScope(*sf);
                }
            }
        }
        // Generate a dummy function for function values that are never called.
        // Would be good if the optimizer guarantees these don't exist, but for now this is
        // more debuggable if it does happen to get called.
        auto dummyfun = Pos();
        SplitAttr(dummyfun);
        Emit(IL_FUNSTART, -1, 0, 0);
        Emit(0, 0);  // keepvars, ownedvars
        Emit(IL_ABORT);
        // Emit the root function.
        SetLabel(fundefjump);
        SplitAttr(Pos());
        Gen(parser.root, return_value);
        auto type = parser.root->exptype;
        assert(type->NumValues() == (size_t)return_value);
        Emit(IL_EXIT, return_value ? GetTypeTableOffset(type): -1);
        SplitAttr(Pos());  // Allow off by one indexing.
        linenumbernodes.pop_back();
        // Patch call sites whose targets weren't known at emission time;
        // never-generated targets point at the dummy function.
        for (auto &[loc, sf] : call_fixups) {
            auto bytecodestart = sf->subbytecodestart;
            if (!bytecodestart) bytecodestart = dummyfun;
            assert(!code[loc]);
            code[loc] = bytecodestart;
        }
        // Now fill in the vtables.
        for (auto udt : st.udttable) {
            for (auto [i, de] : enumerate(udt->dispatch)) {
                if (de.sf) {
                    vtables[udt->vtable_start + i] = de.sf->subbytecodestart;
                }
            }
        }
    }

    ~CodeGen() {
    }

    // FIXME: remove.
    // Emit placeholder nils for a missing body; retval is asserted 0 so the
    // loop is effectively dead except in release builds.
    void Dummy(size_t retval) {
        assert(!retval);
        while (retval--) Emit(IL_PUSHNIL);
    }

    // Generate the bytecode for one SubFunction: FUNSTART header (args,
    // locals, keepvars, owned vars) followed by the body statements.
    void GenScope(SubFunction &sf) {
        if (sf.subbytecodestart > 0) return;  // Already generated.
        keepvars = 0;
        sf.subbytecodestart = Pos();
        if (!sf.typechecked) {
            auto s = DumpNode(*sf.body, 0, false);
            LOG_DEBUG("untypechecked: ", sf.parent->name, " : ", s);
            assert(0);
        }
        vector<int> ownedvars;
        linenumbernodes.push_back(sf.body);
        SplitAttr(Pos());
        Emit(IL_FUNSTART);
        Emit(sf.parent->idx);
        // Emit count + slot indices for a set of variables, collecting which
        // slots this function owns (must be decref'd on exit).
        auto emitvars = [&](const vector<Arg> &v) {
            auto nvarspos = Pos();
            Emit(0);  // Count placeholder, patched below.
            auto nvars = 0;
            for (auto &arg : v) {
                auto n = ValWidth(arg.sid->type);
                for (int i = 0; i < n; i++) {
                    Emit(arg.sid->Idx() + i);
                    nvars++;
                    if (ShouldDec(IsStruct(arg.sid->type->t)
                                  ? TypeLT { FindSlot(*arg.sid->type->udt, i)->type, arg.sid->lt }
                                  : TypeLT { *arg.sid }) && !arg.sid->consume_on_last_use) {
                        ownedvars.push_back(arg.sid->Idx() + i);
                    }
                }
            }
            code[nvarspos] = nvars;
        };
        emitvars(sf.args.v);
        emitvars(sf.locals.v);
        auto keepvarspos = Pos();
        Emit(0);  // keepvars placeholder, patched after body generation.
        // FIXME: don't really want to emit these.. instead should ensure someone takes
        // ownership of them.
        Emit((int)ownedvars.size());
        for (auto si : ownedvars) Emit(si);
        if (sf.body) for (auto c : sf.body->children) {
            Gen(c, 0);
            if (runtime_checks >= RUNTIME_ASSERT_PLUS)
                Emit(IL_ENDSTATEMENT, c->line.line, c->line.fileidx);
        }
        else Dummy(sf.reqret);
        assert(temptypestack.empty());
        code[keepvarspos] = keepvars;
        linenumbernodes.pop_back();
    }

    // This must be called explicitly when any values are consumed.
    // Pops n entries from the temp type stack; callers that can't handle
    // multi-slot structs assert single-width values.
    void TakeTemp(size_t n, bool can_handle_structs) {
        assert(node_context.size());
        for (; n; n--) {
            auto tlt = temptypestack.back();
            temptypestack.pop_back();
            assert(can_handle_structs || ValWidth(tlt.type) == 1); (void)tlt;
            (void)can_handle_structs;
        }
    }

    // Record a just-emitted call operand for later patching if the target
    // function's code hasn't been generated yet (operand is still 0).
    void GenFixup(const SubFunction *sf) {
        assert(sf->body);
        auto pos = Pos() - 1;
        if (!code[pos]) call_fixups.push_back({ pos, sf });
    }

    // Generate a static (IL_CALL) or dynamic-dispatch (IL_DDCALL) call:
    // args, the call instruction, then account for the return values.
    void GenCall(const SubFunction &sf, int vtable_idx, const List *args, size_t retval) {
        auto &f = *sf.parent;
        for (auto c : args->children) {
            Gen(c, 1);
        }
        size_t nargs = args->children.size();
        if (f.nargs() != nargs)
            parser.Error(cat("call to function ", f.name, " needs ", f.nargs(),
                             " arguments, ", nargs, " given"), node_context.back());
        TakeTemp(nargs, true);
        if (vtable_idx < 0) {
            Emit(IL_CALL, sf.subbytecodestart);
            GenFixup(&sf);
        } else {
            // Dispatch needs the stack depth of the receiver relative to the top.
            int stack_depth = -1;
            for (auto c : args->children) stack_depth += ValWidth(c->exptype);
            Emit(IL_DDCALL, vtable_idx, stack_depth);
        }
        SplitAttr(Pos());  // Return address is a block start.
        auto nretvals = sf.returntype->NumValues();
        for (size_t i = 0; i < nretvals; i++) {
            if (retval) {
                rettypes.push_back({ sf, i });
            } else {
                // FIXME: better if this is impossible by making sure typechecker makes it !reqret.
                GenPop({ sf, i });
            }
        }
    };

    // Push a float constant, using the 32-bit form when the value survives a
    // round-trip through float, else the 2-word 64-bit form.
    void GenFloat(double f) {
        if ((float)f == f) {
            Emit(IL_PUSHFLT);
            int2float i2f;
            i2f.f = (float)f;
            Emit(i2f.i);
        } else {
            Emit(IL_PUSHFLT64);
            int2float64 i2f;
            i2f.f = f;
            Emit((int)i2f.i);
            Emit((int)(i2f.i >> 32));
        }
    }

    // Whether popping/overwriting a value of this type+lifetime requires a decref.
    bool ShouldDec(TypeLT typelt) {
        return IsRefNil(typelt.type->t) && typelt.lt == LT_KEEP;
    }

    // Pop a value of the given type, choosing the struct / ref variants as needed.
    void GenPop(TypeLT typelt) {
        if (IsStruct(typelt.type->t)) {
            Emit(typelt.type->t == V_STRUCT_R ? IL_POPVREF : IL_POPV, typelt.type->udt->numslots);
        } else {
            Emit(ShouldDec(typelt) ? IL_POPREF : IL_POP);
        }
    }

    // Duplicate the top stack value and track its type on the temp stack.
    void GenDup(TypeLT tlt) {
        Emit(IL_DUP);
        temptypestack.push_back(tlt);
    }

    // Generate code for node n, requesting retval values to be left on the stack.
    void Gen(const Node *n, size_t retval) {
        // Generate() below generate no retvals if retval==0, otherwise they generate however many
        // they can irrespective of retval, optionally record that in rettypes for the more complex
        // cases. Then at the end of this function the two get matched up.
        auto tempstartsize = temptypestack.size();
        linenumbernodes.push_back(n);

        node_context.push_back(n);
        n->Generate(*this, retval);
        node_context.pop_back();

        assert(n->exptype->t != V_UNDEFINED);

        assert(tempstartsize == temptypestack.size());
        (void)tempstartsize;
        // If 0, the above code already made sure to not generate value(s).
        if (retval) {
            // default case, no rettypes specified.
            if (rettypes.empty()) {
                for (size_t i = 0; i < n->exptype->NumValues(); i++)
                    rettypes.push_back(TypeLT { *n, i });
            }
            // if the caller doesn't want all return values, just pop em
            if (rettypes.size() > retval) {
                while (rettypes.size() > retval) {
                    GenPop(rettypes.back());
                    rettypes.pop_back();
                }
            }
            assert(rettypes.size() == retval);
            // Copy return types on temp stack.
            while (rettypes.size()) {
                temptypestack.push_back(rettypes.front());
                rettypes.erase(rettypes.begin());
            }
        }
        assert(rettypes.empty());
        linenumbernodes.pop_back();
    }

    // Record a write to a log variable, if this ident is one.
    void VarModified(SpecIdent *sid) {
        if (sid->id->logvar) Emit(IL_LOGWRITE, sid->Idx(), sid->logvaridx);
    }

    // Emit the slot-width operand that struct instructions require.
    void GenStructIns(TypeRef type) {
        if (IsStruct(type->t)) Emit(ValWidth(type));
    }

    // Push the value-slot width of a type as an int constant.
    void GenValueSize(TypeRef type) {
        // FIXME: struct variable size.
        Emit(IL_PUSHINT, ValWidth(type));
    }

    // Generate an lvalue operation (write / compound-assign / inc / dec) on
    // any supported lvalue form: variable, field, coroutine var, or indexed.
    // lvalop starts as the int variant and is adjusted for the actual type;
    // +1 selects the "also push result" variant when retval is requested.
    void GenAssign(const Node *lval, int lvalop, size_t retval,
                   const Node *rhs, int take_temp) {
        assert(node_context.back()->exptype->NumValues() == retval);
        auto type = lval->exptype;
        if (lvalop >= LVO_IADD && lvalop <= LVO_IMOD) {
            // Compound assignment: pick float/string/struct-vector variants.
            if (type->t == V_INT) {
            } else if (type->t == V_FLOAT)  {
                assert(lvalop != LVO_IMOD); lvalop += LVO_FADD - LVO_IADD;
            } else if (type->t == V_STRING) {
                assert(lvalop == LVO_IADD); lvalop = LVO_SADD;
            } else if (type->t == V_STRUCT_S) {
                auto sub = type->udt->sametype;
                bool withscalar = IsScalar(rhs->exptype->t);
                if (sub->t == V_INT) {
                    lvalop += (withscalar ? LVO_IVSADD : LVO_IVVADD) - LVO_IADD;
                } else if (sub->t == V_FLOAT) {
                    assert(lvalop != LVO_IMOD);
                    lvalop += (withscalar ? LVO_FVSADD : LVO_FVVADD) - LVO_IADD;
                } else assert(false);
            } else {
                assert(false);
            }
        } else if (lvalop >= LVO_IPP && lvalop <= LVO_IMMP) {
            // Increment/decrement: int or float only.
            if (type->t == V_FLOAT) lvalop += LVO_FPP - LVO_IPP;
            else assert(type->t == V_INT);
        }
        if (retval) lvalop++;
        if (rhs) Gen(rhs, 1);
        if (auto idr = Is<IdentRef>(lval)) {
            TakeTemp(take_temp, true);
            Emit(GENLVALOP(VAR, lvalop), idr->sid->Idx()); GenStructIns(idr->sid->type);
            VarModified(idr->sid);
        } else if (auto dot = Is<Dot>(lval)) {
            auto stype = dot->children[0]->exptype;
            assert(IsUDT(stype->t));  // Ensured by typechecker.
            auto idx = stype->udt->Has(dot->fld);
            assert(idx >= 0);
            auto &field = stype->udt->fields.v[idx];
            Gen(dot->children[0], 1);
            TakeTemp(take_temp + 1, true);
            Emit(GENLVALOP(FLD, lvalop), field.slot); GenStructIns(field.type);
        } else if (auto cod = Is<CoDot>(lval)) {
            auto ir = AssertIs<IdentRef>(cod->variable);
            Gen(cod->coroutine, 1);
            TakeTemp(take_temp + 1, true);
            Emit(GENLVALOP(LOC, lvalop), ir->sid->Idx()); GenStructIns(ir->sid->type);
        } else if (auto indexing = Is<Indexing>(lval)) {
            Gen(indexing->object, 1);
            Gen(indexing->index, 1);
            TakeTemp(take_temp + 2, true);
            switch (indexing->object->exptype->t) {
                case V_VECTOR:
                    Emit(indexing->index->exptype->t == V_INT
                         ? GENLVALOP(IDXVI, lvalop)
                         : GENLVALOP(IDXVV, lvalop));
                    GenStructIns(indexing->index->exptype);  // When index is struct.
                    GenStructIns(type);  // When vector elem is struct.
                    break;
                case V_CLASS:
                    assert(indexing->index->exptype->t == V_INT &&
                           indexing->object->exptype->udt->sametype->Numeric());
                    Emit(GENLVALOP(IDXNI, lvalop));
                    break;
                case V_STRUCT_R:
                case V_STRUCT_S:
                case V_STRING:
                    // FIXME: Would be better to catch this in typechecking, but typechecker does
                    // not currently distinquish lvalues.
                    parser.Error("cannot use this type as lvalue", lval);
                default:
                    assert(false);
            }
        } else {
            parser.Error("lvalue required", lval);
        }
    }

    // Generate both operands of a binary math op, then the op itself (if needed).
    void GenMathOp(const BinOp *n, size_t retval, int opc) {
        Gen(n->left, retval);
        Gen(n->right, retval);
        if (retval) GenMathOp(n->left->exptype, n->right->exptype, n->exptype, opc);
    }

    // Pick the correct typed instruction for math op `opc` (an offset from
    // the ADD opcode) given operand and result types.
    void GenMathOp(TypeRef ltype, TypeRef rtype, TypeRef ptype, int opc) {
        TakeTemp(2, true);
        // Have to check right and left because comparison ops generate ints for node
        // overall.
        if (rtype->t == V_INT && ltype->t == V_INT) {
            Emit(IL_IADD + opc);
        } else if (rtype->t == V_FLOAT && ltype->t == V_FLOAT) {
            Emit(IL_FADD + opc);
        } else if (rtype->t == V_STRING && ltype->t == V_STRING) {
            Emit(IL_SADD + opc);
        } else if (rtype->t == V_FUNCTION && ltype->t == V_FUNCTION) {
            assert(opc == MOP_EQ || opc == MOP_NE);
            Emit(IL_LEQ + (opc - MOP_EQ));
        } else {
            if (opc >= MOP_EQ) {  // EQ/NEQ
                if (IsStruct(ltype->t)) {
                    Emit(IL_STEQ + opc - MOP_EQ);
                    GenStructIns(ltype);
                } else {
                    assert(IsRefNil(ltype->t) &&
                           IsRefNil(rtype->t));
                    Emit(IL_AEQ + opc - MOP_EQ);
                }
            } else {
                // If this is a comparison op, be sure to use the child type.
                TypeRef vectype = opc >= MOP_LT ? ltype : ptype;
                assert(vectype->t == V_STRUCT_S);
                auto sub = vectype->udt->sametype;
                bool withscalar = IsScalar(rtype->t);
                if (sub->t == V_INT)
                    Emit((withscalar ? IL_IVSADD : IL_IVVADD) + opc);
                else if (sub->t == V_FLOAT)
                    Emit((withscalar ? IL_FVSADD : IL_FVVADD) + opc);
                else assert(false);
                GenStructIns(vectype);
            }
        }
    }

    // Generate a bitwise op: both operands then the (int-only) instruction.
    void GenBitOp(const BinOp *n, size_t retval, ILOP opc) {
        Gen(n->left, retval);
        Gen(n->right, retval);
        if (retval) {
            TakeTemp(2, false);
            Emit(opc);
        }
    }

    // Base lvalue opcode for a plain write of this type+lifetime.
    int AssignBaseOp(TypeLT typelt) {
        return IsStruct(typelt.type->t)
            ? (typelt.type->t == V_STRUCT_R ? LVO_WRITEREFV : LVO_WRITEV)
            : (ShouldDec(typelt) ? LVO_WRITEREF : LVO_WRITE);
    }

    // Push a variable (or inline-struct slot range starting at offset).
    void GenPushVar(size_t retval, TypeRef type, int offset) {
        if (!retval) return;
        if (IsStruct(type->t)) {
            Emit(IL_PUSHVARV, offset);
            GenStructIns(type);
        } else {
            Emit(IL_PUSHVAR, offset);
        }
    }

    // Push a field of `object`. For inline structs, tries to fold the access
    // into a direct variable / field / vector-index push without generating
    // the whole object.
    void GenPushField(size_t retval, Node *object, TypeRef stype, TypeRef ftype, int offset) {
        if (IsStruct(stype->t)) {
            // Attempt to not generate object at all, by reading the field inline.
            if (auto idr = Is<IdentRef>(object)) {
                GenPushVar(retval, ftype, idr->sid->Idx() + offset);
                return;
            } else if (auto dot = Is<Dot>(object)) {
                auto sstype = dot->children[0]->exptype;
                assert(IsUDT(sstype->t));
                auto idx = sstype->udt->Has(dot->fld);
                assert(idx >= 0);
                auto &field = sstype->udt->fields.v[idx];
                assert(field.slot >= 0);
                GenPushField(retval, dot->children[0], sstype, ftype, field.slot + offset);
                return;
            } else if (auto cod = Is<CoDot>(object)) {
                // This is already a very slow op, so not worth further optimizing for now.
                (void)cod;
            } else if (auto indexing = Is<Indexing>(object)) {
                // For now only do this for vectors.
                if (indexing->object->exptype->t == V_VECTOR) {
                    GenPushIndex(retval, indexing->object, indexing->index, ValWidth(ftype), offset);
                    return;
                }
            }
        }
        Gen(object, retval);
        if (!retval) return;
        TakeTemp(1, true);
        if (IsStruct(stype->t)) {
            if (IsStruct(ftype->t)) {
                Emit(IL_PUSHFLDV2V, offset);
                GenStructIns(ftype);
            } else {
                Emit(IL_PUSHFLDV, offset);
            }
            GenStructIns(stype);
        } else {
            if (IsStruct(ftype->t)) {
                Emit(IL_PUSHFLD2V, offset);
                GenStructIns(ftype);
            } else {
                Emit(IL_PUSHFLD, offset);
            }
        }
    }

    // Push object[index]. width/offset >= 0 select a sub-part of a vector
    // element (used when folding field access into the index op).
    void GenPushIndex(size_t retval, Node *object, Node *index, int width = -1, int offset = -1) {
        Gen(object, retval);
        Gen(index, retval);
        if (!retval) return;
        TakeTemp(2, true);
        switch (object->exptype->t) {
            case V_VECTOR: {
                auto etype = object->exptype;
                if (index->exptype->t == V_INT) {
                    etype = etype->Element();
                } else {
                    // Struct index: strip one vector level per field.
                    auto &udt = *index->exptype->udt;
                    for (auto &field : udt.fields.v) {
                        (void)field;
                        etype = etype->Element();
                    }
                }
                if (width < 0) {
                    Emit(index->exptype->t == V_INT ? IL_VPUSHIDXI : IL_VPUSHIDXV);
                    GenStructIns(index->exptype);
                } else {
                    // We're indexing a sub-part of the element.
                    Emit(index->exptype->t == V_INT ? IL_VPUSHIDXIS : IL_VPUSHIDXVS);
                    GenStructIns(index->exptype);
                    Emit(width, offset);
                }
                break;
            }
            case V_STRUCT_S:
                assert(index->exptype->t == V_INT && object->exptype->udt->sametype->Numeric());
                Emit(IL_NPUSHIDXI);
                GenStructIns(object->exptype);
                break;
            case V_STRING:
                assert(index->exptype->t == V_INT);
                Emit(IL_SPUSHIDXI);
                break;
            default:
                assert(false);
        }
    }
};
670 
Generate(CodeGen & cg,size_t retval)671 void Nil::Generate(CodeGen &cg, size_t retval) const {
672     if (retval) { cg.Emit(IL_PUSHNIL); }
673 }
674 
Generate(CodeGen & cg,size_t retval)675 void IntConstant::Generate(CodeGen &cg, size_t retval) const {
676     if (!retval) return;
677     if (integer == (int)integer) cg.Emit(IL_PUSHINT, (int)integer);
678     else cg.Emit(IL_PUSHINT64, (int)integer, (int)(integer >> 32));
679 }
680 
Generate(CodeGen & cg,size_t retval)681 void FloatConstant::Generate(CodeGen &cg, size_t retval) const {
682     if (retval) { cg.GenFloat(flt); };
683 }
684 
Generate(CodeGen & cg,size_t retval)685 void StringConstant::Generate(CodeGen &cg, size_t retval) const {
686     if (!retval) return;
687     cg.Emit(IL_PUSHSTR, (int)cg.stringtable.size());
688     cg.stringtable.push_back(str);
689 }
690 
Generate(CodeGen & cg,size_t retval)691 void DefaultVal::Generate(CodeGen &cg, size_t retval) const {
692     if (!retval) return;
693     // Optional args are indicated by being nillable, but for structs passed to builtins the type
694     // has already been made non-nil.
695     switch (exptype->ElementIfNil()->t) {
696         case V_INT:   cg.Emit(IL_PUSHINT, 0); break;
697         case V_FLOAT: cg.GenFloat(0); break;
698         default:      cg.Emit(IL_PUSHNIL); break;
699     }
700 }
701 
Generate(CodeGen & cg,size_t retval)702 void IdentRef::Generate(CodeGen &cg, size_t retval) const {
703     cg.GenPushVar(retval, sid->type, sid->Idx());
704 }
705 
Generate(CodeGen & cg,size_t retval)706 void Dot::Generate(CodeGen &cg, size_t retval) const {
707     auto stype = children[0]->exptype;
708     assert(IsUDT(stype->t));
709     auto idx = stype->udt->Has(fld);
710     assert(idx >= 0);
711     auto &field = stype->udt->fields.v[idx];
712     assert(field.slot >= 0);
713     cg.GenPushField(retval, children[0], stype, field.type, field.slot);
714 }
715 
// obj[index]: delegate to the shared index-push helper.
void Indexing::Generate(CodeGen &cg, size_t retval) const {
    cg.GenPushIndex(retval, object, index);
}
719 
// GenericCall is presumably rewritten to Call/DynCall/NativeCall before
// codegen (TODO confirm in typechecker); reaching this is a compiler bug.
void GenericCall::Generate(CodeGen &, size_t /*retval*/) const {
    assert(false);
}
723 
// coroutine.variable: push a variable stored inside a (suspended) coroutine.
void CoDot::Generate(CodeGen &cg, size_t retval) const {
    cg.Gen(coroutine, retval);
    if (retval) {
        cg.TakeTemp(1, false);
        auto sid = AssertIs<IdentRef>(variable)->sid;
        if (IsStruct(sid->type->t)) {
            // Multi-slot struct value: use the V variant followed by the
            // struct's width (emitted by GenStructIns).
            cg.Emit(IL_PUSHLOCV, sid->Idx());
            cg.GenStructIns(sid->type);
        } else {
            cg.Emit(IL_PUSHLOC, sid->Idx());
        }
    }
}
737 
// a, b = f(): the rhs (children.back()) pushes children.size() - 1 values,
// which are then stored into the lvalues children[0..n-2] in reverse order
// (matching stack order).
void AssignList::Generate(CodeGen &cg, size_t retval) const {
    cg.Gen(children.back(), children.size() - 1);
    for (size_t i = children.size() - 1; i-- > 0; ) {
        auto left = children[i];
        auto id = Is<IdentRef>(left);
        // Non-ident lvalues (Dot) have no sid; they use LT_KEEP.
        auto llt = id ? id->sid->lt : LT_KEEP /* Dot */;
        cg.GenAssign(left, cg.AssignBaseOp({ left->exptype, llt }), 0, nullptr, 1);
    }
    assert(!retval);  // Type checker guarantees this.
    (void)retval;
}
749 
// var x, y = <expr>: generate one value per defined sid, then store them
// into the (fresh) variables in reverse order, matching stack order.
void Define::Generate(CodeGen &cg, size_t retval) const {
    cg.Gen(child, sids.size());
    for (size_t i = sids.size(); i-- > 0; ) {
        auto sid = sids[i].first;
        // Log (frame-persistent) vars read their previous value first.
        if (sid->id->logvar)
            cg.Emit(IL_LOGREAD, sid->logvaridx);
        cg.TakeTemp(1, true);
        // FIXME: Sadly, even though FunIntro now guarantees that variables start as V_NIL,
        // we still can't replace this with a WRITEDEF that doesn't have to decrement, since
        // loops with inlined bodies cause this def to be execute multiple times.
        // (also: multiple copies of the same inlined function in one parent).
        // We should emit a specialized opcode for these cases only.
        cg.Emit(GENLVALOP(VAR, cg.AssignBaseOp({ *sid })), sid->Idx());
        cg.GenStructIns(sid->type);
        cg.VarModified(sid);
    }
    assert(!retval);  // Parser guarantees this.
    (void)retval;
}
769 
// Single assignment: lvalue `left`, value generated from `right` (1 operand).
void Assign::Generate(CodeGen &cg, size_t retval) const {
    cg.GenAssign(left, cg.AssignBaseOp({ *right, 0 }), retval, right, 1);
}
773 
// Compound arithmetic assignments (+=, -=, *=, /=, %=). All pass the integer
// lvalue opcode; GenAssign presumably selects float/struct variants from the
// operand type (TODO confirm against GenAssign). Final 1 = one rhs operand.
void PlusEq::Generate(CodeGen &cg, size_t retval) const {
    cg.GenAssign(left, LVO_IADD, retval, right, 1);
}
void MinusEq::Generate(CodeGen &cg, size_t retval) const {
    cg.GenAssign(left, LVO_ISUB, retval, right, 1);
}
void MultiplyEq::Generate(CodeGen &cg, size_t retval) const {
    cg.GenAssign(left, LVO_IMUL, retval, right, 1);
}
void DivideEq::Generate(CodeGen &cg, size_t retval) const {
    cg.GenAssign(left, LVO_IDIV, retval, right, 1);
}
void ModEq::Generate(CodeGen &cg, size_t retval) const {
    cg.GenAssign(left, LVO_IMOD, retval, right, 1);
}
789 
// Pre/post increment/decrement: no rhs node (nullptr, 0 operands). The P
// suffix marks the post- variants (push value before modification).
void PostDecr::Generate(CodeGen &cg, size_t retval) const { cg.GenAssign(child, LVO_IMMP, retval, nullptr, 0); }
void PostIncr::Generate(CodeGen &cg, size_t retval) const { cg.GenAssign(child, LVO_IPPP, retval, nullptr, 0); }
void PreDecr ::Generate(CodeGen &cg, size_t retval) const { cg.GenAssign(child, LVO_IMM,  retval, nullptr, 0); }
void PreIncr ::Generate(CodeGen &cg, size_t retval) const { cg.GenAssign(child, LVO_IPP,  retval, nullptr, 0); }
794 
// Binary comparison and arithmetic operators: all share GenMathOp, which
// emits the type-appropriate opcode for the given MathOp.
void NotEqual     ::Generate(CodeGen &cg, size_t retval) const { cg.GenMathOp(this, retval, MOP_NE);  }
void Equal        ::Generate(CodeGen &cg, size_t retval) const { cg.GenMathOp(this, retval, MOP_EQ);  }
void GreaterThanEq::Generate(CodeGen &cg, size_t retval) const { cg.GenMathOp(this, retval, MOP_GE);  }
void LessThanEq   ::Generate(CodeGen &cg, size_t retval) const { cg.GenMathOp(this, retval, MOP_LE);  }
void GreaterThan  ::Generate(CodeGen &cg, size_t retval) const { cg.GenMathOp(this, retval, MOP_GT);  }
void LessThan     ::Generate(CodeGen &cg, size_t retval) const { cg.GenMathOp(this, retval, MOP_LT);  }
void Mod          ::Generate(CodeGen &cg, size_t retval) const { cg.GenMathOp(this, retval, MOP_MOD); }
void Divide       ::Generate(CodeGen &cg, size_t retval) const { cg.GenMathOp(this, retval, MOP_DIV); }
void Multiply     ::Generate(CodeGen &cg, size_t retval) const { cg.GenMathOp(this, retval, MOP_MUL); }
void Minus        ::Generate(CodeGen &cg, size_t retval) const { cg.GenMathOp(this, retval, MOP_SUB); }
void Plus         ::Generate(CodeGen &cg, size_t retval) const { cg.GenMathOp(this, retval, MOP_ADD); }
806 
Generate(CodeGen & cg,size_t retval)807 void UnaryMinus::Generate(CodeGen &cg, size_t retval) const {
808     cg.Gen(child, retval);
809     if (!retval) return;
810     cg.TakeTemp(1, true);
811     auto ctype = child->exptype;
812     switch (ctype->t) {
813         case V_INT: cg.Emit(IL_IUMINUS); break;
814         case V_FLOAT: cg.Emit(IL_FUMINUS); break;
815         case V_STRUCT_S: {
816             auto elem = ctype->udt->sametype->t;
817             cg.Emit(elem == V_INT ? IL_IVUMINUS : IL_FVUMINUS);
818             cg.GenStructIns(ctype);
819             break;
820         }
821         default: assert(false);
822     }
823 }
824 
// Bitwise operators: single shared helper, opcode passed directly.
void BitAnd    ::Generate(CodeGen &cg, size_t retval) const { cg.GenBitOp(this, retval, IL_BINAND); }
void BitOr     ::Generate(CodeGen &cg, size_t retval) const { cg.GenBitOp(this, retval, IL_BINOR); }
void Xor       ::Generate(CodeGen &cg, size_t retval) const { cg.GenBitOp(this, retval, IL_XOR); }
void ShiftLeft ::Generate(CodeGen &cg, size_t retval) const { cg.GenBitOp(this, retval, IL_ASL); }
void ShiftRight::Generate(CodeGen &cg, size_t retval) const { cg.GenBitOp(this, retval, IL_ASR); }
830 
Generate(CodeGen & cg,size_t retval)831 void Negate::Generate(CodeGen &cg, size_t retval) const {
832     cg.Gen(child, retval);
833     if (!retval) return;
834     cg.TakeTemp(1, false);
835     cg.Emit(IL_NEG);
836 }
837 
Generate(CodeGen & cg,size_t retval)838 void ToFloat::Generate(CodeGen &cg, size_t retval) const {
839     cg.Gen(child, retval);
840     if (!retval) return;
841     cg.TakeTemp(1, false);
842     cg.Emit(IL_I2F);
843 }
844 
Generate(CodeGen & cg,size_t retval)845 void ToString::Generate(CodeGen &cg, size_t retval) const {
846     cg.Gen(child, retval);
847     if (!retval) return;
848     cg.TakeTemp(1, true);
849     switch (child->exptype->t) {
850         case V_STRUCT_R:
851         case V_STRUCT_S: {
852             // TODO: can also roll these into A2S?
853             cg.Emit(IL_ST2S, cg.GetTypeTableOffset(child->exptype));
854             break;
855         }
856         default: {
857             cg.Emit(IL_A2S, cg.GetTypeTableOffset(child->exptype->ElementIfNil()));
858             break;
859         }
860     }
861 }
862 
Generate(CodeGen & cg,size_t retval)863 void ToBool::Generate(CodeGen &cg, size_t retval) const {
864     cg.Gen(child, retval);
865     if (!retval) return;
866     cg.TakeTemp(1, false);
867     cg.Emit(IsRefNil(child->exptype->t) ? IL_E2BREF : IL_E2B);
868 }
869 
Generate(CodeGen & cg,size_t retval)870 void ToInt::Generate(CodeGen &cg, size_t retval) const {
871     cg.Gen(child, retval);
872     // No actual opcode needed, this node is purely to store correct types.
873     if (retval) cg.TakeTemp(1, false);
874 }
875 
// Lifetime adjustment of the child's values: `incref`/`decref` are bitmasks
// with one bit per returned value. INCREF bumps a refcount in place; KEEPREF
// parks a reference in a keepvar slot so it outlives the statement.
void ToLifetime::Generate(CodeGen &cg, size_t retval) const {
    cg.Gen(child, retval);
    for (size_t i = 0; i < retval; i++) {
        // We have to check for reftype again, since typechecker allowed V_VAR values that may
        // have become scalars by now.
        if (IsRefNil(child->exptype->Get(i)->t)) {
            assert(i < cg.temptypestack.size());
            // fi: distance from the top of the stack (0 = topmost value).
            auto fi = (int)(retval - i - 1);
            auto type = cg.temptypestack[cg.temptypestack.size() - 1 - fi].type;
            if (incref & (1LL << i)) {
                assert(IsRefNil(type->t));
                cg.Emit(IL_INCREF, fi);
            }
            if (decref & (1LL << i)) {
                assert(IsRefNil(type->t));
                // Keepvar indices are offset by the current temp-stack width.
                int stack_depth = 0;
                for (auto &tlt : cg.temptypestack) stack_depth += ValWidth(tlt.type);
                if (type->t == V_STRUCT_R) {
                    // TODO: alternatively emit a single op with a list or bitmask?
                    for (int j = 0; j < type->udt->numslots; j++) {
                        if (IsRefNil(FindSlot(*type->udt, j)->type->t))
                            cg.Emit(IL_KEEPREF, fi + j, cg.keepvars++ + stack_depth);
                    }
                } else {
                    cg.Emit(IL_KEEPREF, fi, cg.keepvars++ + stack_depth);
                }
            }
        }
    }
    // We did not consume these, so we have to pass them on.
    for (size_t i = 0; i < retval; i++) {
        cg.rettypes.push_back(cg.temptypestack.back());
        cg.temptypestack.pop_back();
    }
}
911 
Generate(CodeGen & cg,size_t retval)912 void FunRef::Generate(CodeGen &cg, size_t retval) const {
913     if (!retval) return;
914     // If no body, then the function has been optimized away, meaning this
915     // function value will never be used.
916     // FIXME: instead, ensure such values are removed by the optimizer.
917     if (sf->parent->anonymous && sf->body) {
918         cg.Emit(IL_PUSHFUN, sf->subbytecodestart);
919         cg.GenFixup(sf);
920     } else {
921         cg.Dummy(retval);
922     }
923 }
924 
// Enum/UDT type references carry no runtime value; emit filler if one is
// expected.
void EnumRef::Generate(CodeGen &cg, size_t retval) const {
    cg.Dummy(retval);
}

void UDTRef::Generate(CodeGen &cg, size_t retval) const {
    cg.Dummy(retval);
}
932 
// Call a builtin (native) function. Handles assert specially, then selects
// between the fixed-arity BCALLRET family and BCALLRETV, with extra paths
// for continuation builtins (graphics.h) and control-flow-changing builtins.
void NativeCall::Generate(CodeGen &cg, size_t retval) const {
    if (nf->IsAssert()) {
        // FIXME: lift this into a language feature.
        auto c = children[0];
        if (retval || cg.runtime_checks >= RUNTIME_ASSERT) {
            cg.Gen(c, 1);
            cg.TakeTemp(1, false);
            if (cg.runtime_checks >= RUNTIME_ASSERT) {
                // ASSERT vs ASSERTR (+1) depending on whether a value is kept.
                cg.Emit(IL_ASSERT + (!!retval), c->line.line, c->line.fileidx);
                cg.Emit((int)cg.stringtable.size());
                // FIXME: would be better to use the original source code here.
                cg.stringtable.push_back(cg.st.StoreName(DumpNode(*c, 0, true)));
            }
        } else {
            // Checks off and result unused: evaluate for side effects only.
            cg.Gen(c, 0);
        }
        return;
    }
    // TODO: could pass arg types in here if most exps have types, cheaper than
    // doing it all in call instruction?
    // Struct args (and NF_PUSHVALUEWIDTH args) push an extra width value.
    size_t numstructs = 0;
    for (auto [i, c] : enumerate(children)) {
        cg.Gen(c, 1);
        if ((IsStruct(c->exptype->t) ||
             nf->args.v[i].flags & NF_PUSHVALUEWIDTH) &&
            !Is<DefaultVal>(c)) {
            cg.GenValueSize(c->exptype);
            cg.temptypestack.push_back({ type_int, LT_ANY });
            numstructs++;
        }
    }
    size_t nargs = children.size();
    cg.TakeTemp(nargs + numstructs, true);
    assert(nargs == nf->args.size() && (nf->fun.fnargs < 0 || nargs <= 7));
    // Fixed-arity opcodes come in triplets (RET0/RET0POP/RET0POPREF style)
    // per arg count; variadic builtins use BCALLRETV.
    int vmop = nf->fun.fnargs >= 0 ? IL_BCALLRET0 + (int)nargs * 3 : IL_BCALLRETV;
    if (nf->cont1) { // graphics.h
        // NOTE(review): lastarg is nullptr when children is empty; presumably
        // cont1 builtins always have at least one arg -- confirm Is<> is safe here.
        auto lastarg = children.empty() ? nullptr : children.back();
        if (!Is<DefaultVal>(lastarg)) {
            cg.Emit(vmop, nf->idx);
            cg.Emit(IL_CALLVCOND);  // FIXME: doesn't need to be COND anymore?
            cg.SplitAttr(cg.Pos());
            assert(lastarg->exptype->t == V_FUNCTION);
            assert(!lastarg->exptype->sf->reqret);  // We never use the retval.
            cg.Emit(IL_CONT1, nf->idx);  // Never returns a value.
            cg.Dummy(retval);
        } else {
            if (!retval) vmop += 2;  // These always return nil.
            cg.Emit(vmop, nf->idx);
        }
    } else if (nf->CanChangeControlFlow()) {
        cg.Emit(vmop, nf->idx);
        // Control flow may resume here; mark a valid jump target.
        cg.SplitAttr(cg.Pos());
        if (!retval) cg.GenPop({ nattype, natlt });
    } else {
        auto last = nattype->NumValues() - 1;
        auto tlt = TypeLT { nattype->Get(last), nattype->GetLifetime(last, natlt) };
        // FIXME: simplify.
        auto val_width_1 = !IsStruct(tlt.type->t) || tlt.type->udt->numslots == 1;
        auto var_width_void = nf->fun.fnargs < 0 && nf->retvals.v.empty();
        if (!retval && val_width_1 && !var_width_void) {
            // Generate version that never produces top of stack (but still may have
            // additional return values)
            vmop++;
            if (!cg.ShouldDec(tlt))
                vmop++;
        }
        cg.Emit(vmop, nf->idx);
        if (!retval && !val_width_1) {
            cg.GenPop(tlt);
        }
    }
    if (nf->retvals.v.size() > 1) {
        assert(nf->retvals.v.size() == nattype->NumValues());
        for (size_t i = 0; i < nattype->NumValues(); i++) {
            cg.rettypes.push_back({ nattype->Get(i), nattype->GetLifetime(i, natlt) });
        }
    } else {
        assert(nf->retvals.v.size() >= retval);
    }
    if (!retval) {
        // Top of stack has already been removed by op, but still need to pop any
        // additional values.
        if (cg.rettypes.size()) cg.rettypes.pop_back();
        while (cg.rettypes.size()) {
            cg.GenPop(cg.rettypes.back());
            cg.rettypes.pop_back();
        }
    }
}
1022 
// Statically resolved call; vtable_idx is passed along for dispatch inside
// GenCall.
void Call::Generate(CodeGen &cg, size_t retval) const {
    cg.GenCall(*sf, vtable_idx, this, retval);
}
1026 
// Call through a function value. Three cases: yield inside a coroutine,
// statically-known target (turned into a normal call), and a true dynamic
// call through an `istype` function variable.
void DynCall::Generate(CodeGen &cg, size_t retval) const {
    if (sid->type->t == V_YIELD) {
        if (Arity()) {
            for (auto c : children) {
                cg.Gen(c, 1);
            }
            size_t nargs = children.size();
            cg.TakeTemp(nargs, false);
            assert(nargs == 1);
        } else {
            // yield with no value still pushes something.
            cg.Emit(IL_PUSHNIL);
        }
        cg.Emit(IL_YIELD);
        // We may have temps on the stack from an enclosing for.
        // Check that these temps are actually from for loops, to not mask bugs.
        assert(cg.temptypestack.size() == cg.nested_fors * 2);
        cg.SplitAttr(cg.Pos());
        if (!retval) cg.GenPop({ exptype, lt });
    } else {
        assert(sf && sf == sid->type->sf);
        // FIXME: in the future, we can make a special case for istype calls.
        if (!sf->parent->istype) {
            // We statically know which function this is calling.
            // We can now turn this into a normal call.
            cg.GenCall(*sf, -1, this, retval);
        } else {
            for (auto c : children) {
                cg.Gen(c, 1);
            }
            size_t nargs = children.size();
            assert(nargs == sf->args.size());
            cg.Emit(IL_PUSHVAR, sid->Idx());
            cg.TakeTemp(nargs, true);
            cg.Emit(IL_CALLV);
            cg.SplitAttr(cg.Pos());
            if (sf->reqret) {
                if (!retval) cg.GenPop({ exptype, lt });
            } else {
                // Callee pushes nothing; emit filler if a value was expected.
                cg.Dummy(retval);
            }
        }
    }
}
1070 
// The following node types are structural/abstract and never generate code
// themselves; their parents (or concrete subclasses) do.
void List::Generate(CodeGen & /*cg*/, size_t /*retval*/) const {
    assert(false);  // Handled by individual parents.
}

void TypeAnnotation::Generate(CodeGen & /*cg*/, size_t /*retval*/) const {
    assert(false);  // Handled by individual parents.
}

void Unary::Generate(CodeGen & /*cg*/, size_t /*retval*/) const {
    assert(false);  // Handled by individual parents.
}

void Coercion::Generate(CodeGen & /*cg*/, size_t /*retval*/) const {
    assert(false);  // Handled by individual parents.
}

void BinOp::Generate(CodeGen & /*cg*/, size_t /*retval*/) const {
    assert(false);  // Handled by individual parents.
}
1090 
// Inlined function body: all children for side effects only, except the last
// which supplies the value (if any).
void Inlined::Generate(CodeGen &cg, size_t retval) const {
    for (auto c : children) {
        auto rv = c != children.back() ? 0 : retval;
        cg.Gen(c, rv);
        // rv nonzero implies retval nonzero, so this is TakeTemp(1, true).
        if (rv) cg.TakeTemp(retval != 0, true);
    }
}
1098 
Generate(CodeGen & cg,size_t retval)1099 void Seq::Generate(CodeGen &cg, size_t retval) const {
1100     cg.Gen(head, 0);
1101     cg.Gen(tail, retval);
1102     if (retval) cg.TakeTemp(1, true);
1103 }
1104 
// return a, b, c: generate only the first `retval` children as values (the
// rest for side effects), then record their types/lifetimes in rettypes.
void MultipleReturn::Generate(CodeGen &cg, size_t retval) const {
    for (auto [i, c] : enumerate(children))
        cg.Gen(c, i < retval);
    cg.TakeTemp(retval, true);
    for (auto[i, c] : enumerate(children))
        if (i < retval)
            cg.rettypes.push_back({ c->exptype, c->lt });
}
1113 
// Short-circuit `and`: if the lhs fails, skip the rhs. The R variants are
// used when a result is wanted (presumably they retain the lhs value as the
// expression result -- confirm against opcode implementation).
void And::Generate(CodeGen &cg, size_t retval) const {
    cg.Gen(left, 1);
    cg.TakeTemp(1, false);
    cg.Emit(retval ? IL_JUMPFAILR : IL_JUMPFAIL, 0);
    auto loc = cg.Pos();
    cg.Gen(right, retval);
    if (retval) cg.TakeTemp(1, false);
    cg.SetLabel(loc);
}

// Short-circuit `or`: mirror of And with the NOFAIL jump variants.
void Or::Generate(CodeGen &cg, size_t retval) const {
    cg.Gen(left, 1);
    cg.TakeTemp(1, false);
    cg.Emit(retval ? IL_JUMPNOFAILR : IL_JUMPNOFAIL, 0);
    auto loc = cg.Pos();
    cg.Gen(right, retval);
    if (retval) cg.TakeTemp(1, false);
    cg.SetLabel(loc);
}
1133 
Generate(CodeGen & cg,size_t retval)1134 void Not::Generate(CodeGen &cg, size_t retval) const {
1135     cg.Gen(child, retval);
1136     if (retval) {
1137         cg.TakeTemp(1, false);
1138         cg.Emit(IsRefNil(child->exptype->t) ? IL_LOGNOTREF : IL_LOGNOT);
1139     }
1140 }
1141 
// if/else. A value-producing if without an else uses JUMPFAILN (presumably
// the N variant supplies nil when the condition fails -- confirm in VM).
void If::Generate(CodeGen &cg, size_t retval) const {
    cg.Gen(condition, 1);
    cg.TakeTemp(1, false);
    bool has_else = !Is<DefaultVal>(falsepart);
    cg.Emit(!has_else && retval ? IL_JUMPFAILN : IL_JUMPFAIL, 0);
    auto loc = cg.Pos();
    if (has_else) {
        cg.Gen(truepart, retval);
        if (retval) cg.TakeTemp(1, true);
        // Jump over the else branch at the end of the then branch.
        cg.Emit(IL_JUMP, 0);
        auto loc2 = cg.Pos();
        cg.SetLabel(loc);
        cg.Gen(falsepart, retval);
        if (retval) cg.TakeTemp(1, true);
        cg.SetLabel(loc2);
    } else {
        assert(!retval);
        cg.Gen(truepart, 0);
        cg.SetLabel(loc);
    }
}
1163 
// while loop: condition re-evaluated each iteration; produces no value.
void While::Generate(CodeGen &cg, size_t retval) const {
    // The backwards jump target must start a new attr section.
    cg.SplitAttr(cg.Pos());
    auto loopback = cg.Pos();
    cg.Gen(condition, 1);
    cg.TakeTemp(1, false);
    cg.Emit(IL_JUMPFAIL, 0);
    auto jumpout = cg.Pos();
    cg.Gen(body, 0);
    cg.Emit(IL_JUMP, loopback);
    cg.SetLabel(jumpout);
    // Emit filler if a value was (unexpectedly) requested.
    cg.Dummy(retval);
}
1176 
// for loop: keeps two temps on the stack for the whole loop (the index,
// initialized to -1, and the iterable) -- see the nested_fors * 2 asserts
// elsewhere. ForLoopElem/ForLoopCounter inside the body rely on these.
void For::Generate(CodeGen &cg, size_t retval) const {
    cg.Emit(IL_PUSHINT, -1);   // i
    cg.temptypestack.push_back({ type_int, LT_ANY });
    cg.Gen(iter, 1);
    cg.nested_fors++;
    // Enter the loop via the FOR opcode at the bottom (condition first).
    cg.Emit(IL_JUMP, 0);
    auto startloop = cg.Pos();
    cg.SplitAttr(cg.Pos());
    cg.Gen(body, 0);
    cg.SetLabel(startloop);
    switch (iter->exptype->t) {
        case V_INT:      cg.Emit(IL_IFOR); break;
        case V_STRING:   cg.Emit(IL_SFOR); break;
        case V_VECTOR:   cg.Emit(IL_VFOR); break;
        default:         assert(false);
    }
    // The FOR opcode's operand: where the body starts.
    cg.Emit(startloop);
    cg.nested_fors--;
    cg.TakeTemp(2, false);
    cg.Dummy(retval);
}
1198 
Generate(CodeGen & cg,size_t)1199 void ForLoopElem::Generate(CodeGen &cg, size_t /*retval*/) const {
1200     auto typelt = cg.temptypestack.back();
1201     switch (typelt.type->t) {
1202         case V_INT:       cg.Emit(IL_IFORELEM); break;
1203         case V_STRING:    cg.Emit(IL_SFORELEM); break;
1204         case V_VECTOR:    cg.Emit(IsRefNil(typelt.type->sub->t) ? IL_VFORELEMREF : IL_VFORELEM); break;
1205         default:          assert(false);
1206     }
1207 }
1208 
// Push the current loop index inside a for body (opcode name suggests the
// integer counter; semantics live in the VM).
void ForLoopCounter::Generate(CodeGen &cg, size_t /*retval*/) const {
    cg.Emit(IL_FORLOOPI);
}
1212 
// switch: the value is duplicated and compared against each pattern entry;
// `thiscase` jumps into the case body, `nextcase` to the following case,
// `exitswitch` past the whole statement.
void Switch::Generate(CodeGen &cg, size_t retval) const {
    // TODO: create specialized version for dense range of ints with jump table.
    cg.Gen(value, 1);
    cg.TakeTemp(1, false);
    auto valtlt = TypeLT { *value, 0 };
    vector<int> nextcase, thiscase, exitswitch;
    for (auto n : cases->children) {
        for (auto loc : nextcase) cg.SetLabel(loc);
        nextcase.clear();
        // The switched-on value stays live across this case's comparisons.
        cg.temptypestack.push_back(valtlt);
        auto cas = AssertIs<Case>(n);
        for (auto c : cas->pattern->children) {
            auto is_last = c == cas->pattern->children.back();
            cg.GenDup(valtlt);
            auto compare_one = [&](MathOp op, Node *cn) {
                cg.Gen(cn, 1);
                cg.GenMathOp(value->exptype, c->exptype, value->exptype, op);
            };
            auto compare_one_jump = [&](MathOp op, Node *cn) {
                compare_one(op, cn);
                // Last pattern: failure falls to the next case. Otherwise:
                // success short-circuits into the body.
                cg.Emit(is_last ? IL_JUMPFAIL : IL_JUMPNOFAIL, 0);
                (is_last ? nextcase : thiscase).push_back(cg.Pos());
            };
            if (auto r = Is<Range>(c)) {
                // start <= value <= end, as two comparisons.
                compare_one(MOP_GE, r->start);
                cg.Emit(IL_JUMPFAIL, 0);
                auto loc = cg.Pos();
                if (is_last) nextcase.push_back(loc);
                cg.GenDup(valtlt);
                compare_one_jump(MOP_LE, r->end);
                if (!is_last) cg.SetLabel(loc);
            } else {
                // FIXME: if this is a string, will alloc a temp string object just for the sake of
                // comparison. Better to create special purpose opcode to compare with const string.
                compare_one_jump(MOP_EQ, c);
            }
        }
        for (auto loc : thiscase) cg.SetLabel(loc);
        thiscase.clear();
        // Done comparing: drop the duplicated switch value for this case.
        cg.GenPop(valtlt);
        cg.TakeTemp(1, false);
        cg.Gen(cas->body, retval);
        if (retval) cg.TakeTemp(1, true);
        if (n != cases->children.back()) {
            cg.Emit(IL_JUMP, 0);
            exitswitch.push_back(cg.Pos());
        }
    }
    for (auto loc : nextcase) cg.SetLabel(loc);
    for (auto loc : exitswitch) cg.SetLabel(loc);
}
1264 
// Case and Range nodes are consumed directly by Switch::Generate and must
// never be generated on their own.
void Case::Generate(CodeGen &/*cg*/, size_t /*retval*/) const {
    assert(false);
}

void Range::Generate(CodeGen &/*cg*/, size_t /*retval*/) const {
    assert(false);
}
1272 
// Vector / object / struct construction from element expressions.
void Constructor::Generate(CodeGen &cg, size_t retval) const {
    // FIXME: a malicious script can exploit this for a stack overflow.
    for (auto c : children) {
        cg.Gen(c, retval);
    }
    if (!retval) return;
    cg.TakeTemp(Arity(), true);
    auto offset = cg.GetTypeTableOffset(exptype);
    if (IsUDT(exptype->t)) {
        assert(exptype->udt->fields.size() == Arity());
        if (IsStruct(exptype->t)) {
            // This is now a no-op! Struct elements sit inline on the stack.
        } else {
            cg.Emit(IL_NEWOBJECT, offset);
        }
    } else {
        assert(exptype->t == V_VECTOR);
        cg.Emit(IL_NEWVEC, offset, (int)Arity());
    }
}
1293 
Generate(CodeGen & cg,size_t retval)1294 void IsType::Generate(CodeGen &cg, size_t retval) const {
1295     cg.Gen(child, retval);
1296     // If the value was a scalar, then it always results in a compile time type check,
1297     // which means this T_IS would have been optimized out. Which means from here on we
1298     // can assume its a ref.
1299     assert(!IsUnBoxed(child->exptype->t));
1300     if (retval) {
1301         cg.TakeTemp(1, false);
1302         cg.Emit(IL_ISTYPE, cg.GetTypeTableOffset(giventype));
1303     }
1304 }
1305 
Generate(CodeGen & cg,size_t retval)1306 void EnumCoercion::Generate(CodeGen &cg, size_t retval) const {
1307     cg.Gen(child, retval);
1308     if (retval) cg.TakeTemp(1, false);
1309 }
1310 
// return statement: pops enclosing-for temps, generates the return value(s),
// and emits IL_RETURN with the target function id and total slot count.
void Return::Generate(CodeGen &cg, size_t retval) const {
    assert(!retval);
    (void)retval;
    assert(!cg.rettypes.size());
    if (cg.temptypestack.size()) {
        // We have temps on the stack from an enclosing for.
        // We can't actually remove these from the stack as the parent nodes still
        // expect them to be there.
        // Check that these temps are actually from for loops, to not mask bugs.
        assert(cg.temptypestack.size() == cg.nested_fors * 2);
        for (int i = (int)cg.temptypestack.size() - 1; i >= 0; i--) {
            cg.GenPop(cg.temptypestack[i]);
        }
    }
    auto nretvals = make_void ? 0 : sf->returntype->NumValues();
    // Slot count may exceed the value count when returning multi-slot structs.
    int nretslots = 0;
    if (!make_void) {
        for (size_t i = 0; i < nretvals; i++) {
            nretslots += ValWidth(sf->returntype->Get(i));
        }
    }
    if (nretslots > MAX_RETURN_VALUES) cg.parser.Error("too many return values");
    if (sf->reqret) {
        if (!Is<DefaultVal>(child)) { cg.Gen(child, nretvals); cg.TakeTemp(nretvals, true); }
        else { cg.Emit(IL_PUSHNIL); assert(nretvals == 1); }
    } else {
        // Caller ignores the result: evaluate for side effects only.
        if (!Is<DefaultVal>(child)) cg.Gen(child, 0);
        nretvals = 0;
        nretslots = 0;
    }
    // FIXME: we could change the VM to instead work with SubFunction ids.
    // Note: this can only work as long as the type checker forces specialization
    // of the functions in between here and the function returned to.
    // FIXME: shouldn't need any type here if V_VOID, but nretvals is at least 1 ?
    cg.Emit(IL_RETURN, sf->parent->idx, nretslots);
}
1347 
// Emits IL_COCL only when a value is expected; semantics of the opcode live
// in the VM.
void CoClosure::Generate(CodeGen &cg, size_t retval) const {
    if (retval) cg.Emit(IL_COCL);
}
1351 
// Create a coroutine: IL_CORO carries a skip-target, the coroutine's type
// table offset, and a counted list of variable indices to save on yield.
void CoRoutine::Generate(CodeGen &cg, size_t retval) const {
    cg.Emit(IL_CORO, 0);
    auto loc = cg.Pos();
    auto sf = exptype->sf;
    assert(exptype->t == V_COROUTINE && sf);
    cg.Emit(cg.GetTypeTableOffset(exptype));
    // TODO: We shouldn't need to store this table for each call, instead do it once for
    // each function.
    // `num` is patched below: it counts the variable slots that follow.
    auto num = cg.Pos();
    cg.Emit(0);
    for (auto &arg : sf->coyieldsave.v) {
        // Multi-slot values contribute one index per slot.
        auto n = ValWidth(arg.sid->type);
        for (int i = 0; i < n; i++) {
            cg.Emit(arg.sid->Idx() + i);
            cg.code[num]++;
        }
    }
    cg.temptypestack.push_back(TypeLT { *this, 0 });
    cg.Gen(call, 1);
    cg.TakeTemp(2, false);
    cg.Emit(IL_COEND);
    cg.SetLabel(loc);
    if (!retval) cg.Emit(IL_POPREF);
}
1376 
// typeof(...): pushes a type-table offset as an int constant. Supports
// `typeof return` (inside a native call), typeof on an ident, and typeof on
// an explicit type annotation.
void TypeOf::Generate(CodeGen &cg, size_t /*retval*/) const {
    if (auto dv = Is<DefaultVal>(child)) {
        // `typeof return`: look at the enclosing node to find the call whose
        // result type we should report.
        if (cg.node_context.size() >= 2) {
            auto parent = cg.node_context[cg.node_context.size() - 2];
            if (Is<NativeCall>(parent)) {
                cg.Emit(IL_PUSHINT, cg.GetTypeTableOffset(parent->exptype));
                return;
            }
        }
        cg.parser.Error("typeof return out of call context", dv);
    } else  if (auto idr = Is<IdentRef>(child)) {
        cg.Emit(IL_PUSHINT, cg.GetTypeTableOffset(idr->exptype));
    } else {
        auto ta = AssertIs<TypeAnnotation>(child);
        cg.Emit(IL_PUSHINT, cg.GetTypeTableOffset(ta->giventype));
    }
}
1394 
1395 }  // namespace lobster
1396