/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#include "src/sksl/lex/NFAtoDFA.h"
#include "src/sksl/lex/RegexParser.h"

#include <algorithm>
#include <cstdio>
#include <cstdlib>
#include <fstream>
#include <sstream>
#include <string>
#include <vector>

/**
 * Processes a .lex file and produces .h and .cpp files that implement a lexical analyzer. The .lex
 * file is a text file with one token definition per line. Each line is of the form:
 *     <TOKEN_NAME> = <pattern>
 * where <pattern> is either a regular expression (e.g. [0-9]) or a double-quoted literal string.
 */
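
/*
 * For illustration only, a hypothetical .lex input (the token names here are invented, not taken
 * from an actual grammar file) might contain lines such as:
 *
 *     WHILE       = "while"
 *     INT_LITERAL = [0-9]+
 *
 * Quoted patterns are matched as literal character sequences; unquoted patterns are handed to
 * RegexParser.
 */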

static constexpr const char* HEADER =
    "/*\n"
    " * Copyright 2017 Google Inc.\n"
    " *\n"
    " * Use of this source code is governed by a BSD-style license that can be\n"
    " * found in the LICENSE file.\n"
    " */\n"
    "/*****************************************************************************************\n"
    " ******************** This file was generated by sksllex. Do not edit. *******************\n"
    " *****************************************************************************************/\n";

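// Emits the generated header: a Token struct holding a kind, offset, and length, plus the lexer
// class declaration with its start() and next() entry points.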
void writeH(const DFA& dfa, const char* lexer, const char* token,
            const std::vector<std::string>& tokens, const char* hPath) {
    std::ofstream out(hPath);
    SkASSERT(out.good());
    out << HEADER;
    out << "#ifndef SKSL_" << lexer << "\n";
    out << "#define SKSL_" << lexer << "\n";
    out << "#include <cstddef>\n";
    out << "#include <cstdint>\n";
    out << "namespace SkSL {\n";
    out << "\n";
    out << "struct " << token << " {\n";
    out << "    enum Kind {\n";
    for (const std::string& t : tokens) {
        out << "        #undef " << t << "\n";
        out << "        " << t << ",\n";
    }
    out << "    };\n";
    out << "\n";
    out << "    " << token << "()\n";
    out << "    : fKind(Kind::INVALID)\n";
    out << "    , fOffset(-1)\n";
    out << "    , fLength(-1) {}\n";
    out << "\n";
    out << "    " << token << "(Kind kind, int32_t offset, int32_t length)\n";
    out << "    : fKind(kind)\n";
    out << "    , fOffset(offset)\n";
    out << "    , fLength(length) {}\n";
    out << "\n";
    out << "    Kind fKind;\n";
    out << "    int fOffset;\n";
    out << "    int fLength;\n";
    out << "};\n";
    out << "\n";
    out << "class " << lexer << " {\n";
    out << "public:\n";
    out << "    void start(const char* text, int32_t length) {\n";
    out << "        fText = text;\n";
    out << "        fLength = length;\n";
    out << "        fOffset = 0;\n";
    out << "    }\n";
    out << "\n";
    out << "    " << token << " next();\n";
    out << "\n";
    out << "private:\n";
    out << "    const char* fText;\n";
    out << "    int32_t fLength;\n";
    out << "    int32_t fOffset;\n";
    out << "};\n";
    out << "\n";
    out << "} // namespace\n";
    out << "#endif\n";
}

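// Emits the generated implementation: the DFA's character mapping, transition, and accept tables,
// followed by the next() method that walks the DFA over the input text.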
void writeCPP(const DFA& dfa, const char* lexer, const char* token, const char* include,
              const char* cppPath) {
    std::ofstream out(cppPath);
    SkASSERT(out.good());
    out << HEADER;
    out << "#include \"" << include << "\"\n";
    out << "\n";
    out << "namespace SkSL {\n";
    out << "\n";

    size_t states = 0;
    for (const auto& row : dfa.fTransitions) {
        states = std::max(states, row.size());
    }
    // arbitrarily-chosen character which is greater than START_CHAR and should not appear in
    // actual input
    out << "static const uint8_t INVALID_CHAR = 18;\n";
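    // The generated tables work together: mappings[] collapses each input byte to a character
    // class, that class selects a row of transitions[], and accepts[] records which token kind,
    // if any, each DFA state accepts.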
    out << "static int8_t mappings[" << dfa.fCharMappings.size() << "] = {\n    ";
    const char* separator = "";
    for (int m : dfa.fCharMappings) {
        out << separator << std::to_string(m);
        separator = ", ";
    }
    out << "\n};\n";
    out << "static int16_t transitions[" << dfa.fTransitions.size() << "][" << states << "] = {\n";
    for (size_t c = 0; c < dfa.fTransitions.size(); ++c) {
        out << "    {";
        for (size_t j = 0; j < states; ++j) {
            if (j < dfa.fTransitions[c].size()) {
                out << " " << dfa.fTransitions[c][j] << ",";
            } else {
                out << " 0,";
            }
        }
        out << " },\n";
    }
    out << "};\n";
    out << "\n";

    out << "static int8_t accepts[" << states << "] = {";
    for (size_t i = 0; i < states; ++i) {
        if (i < dfa.fAccepts.size()) {
            out << " " << dfa.fAccepts[i] << ",";
        } else {
            out << " " << INVALID << ",";
        }
    }
    out << " };\n";
    out << "\n";

    out << token << " " << lexer << "::next() {\n";
    out << "    // note that we cheat here: normally a lexer needs to worry about the case\n";
    out << "    // where a token has a prefix which is not itself a valid token - for instance,\n";
    out << "    // maybe we have a valid token 'while', but 'w', 'wh', etc. are not valid\n";
    out << "    // tokens. Our grammar doesn't have this property, so we can simplify the logic\n";
    out << "    // a bit.\n";
    out << "    int32_t startOffset = fOffset;\n";
    out << "    if (startOffset == fLength) {\n";
    out << "        return " << token << "(" << token << "::END_OF_FILE, startOffset, 0);\n";
    out << "    }\n";
    out << "    int16_t state = 1;\n";
    out << "    for (;;) {\n";
    out << "        if (fOffset >= fLength) {\n";
    out << "            if (accepts[state] == -1) {\n";
    out << "                return " << token << "(" << token << "::END_OF_FILE, startOffset, 0);\n";
    out << "            }\n";
    out << "            break;\n";
    out << "        }\n";
    out << "        uint8_t c = (uint8_t) fText[fOffset];\n";
    out << "        if (c <= 8 || c >= " << dfa.fCharMappings.size() << ") {\n";
    out << "            c = INVALID_CHAR;\n";
    out << "        }\n";
    out << "        int16_t newState = transitions[mappings[c]][state];\n";
    out << "        if (!newState) {\n";
    out << "            break;\n";
    out << "        }\n";
    out << "        state = newState;\n";
    out << "        ++fOffset;\n";
    out << "    }\n";
    out << "    " << token << "::Kind kind = (" << token << "::Kind) accepts[state];\n";
    out << "    return " << token << "(kind, startOffset, fOffset - startOffset);\n";
    out << "}\n";
    out << "\n";
    out << "} // namespace\n";
}

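// Reads the token definitions from the .lex file, adds one regex per token to an NFA, converts
// that NFA to a DFA, and writes the generated header and implementation.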
void process(const char* inPath, const char* lexer, const char* token, const char* hPath,
             const char* cppPath) {
    NFA nfa;
    std::vector<std::string> tokens;
    tokens.push_back("END_OF_FILE");
    std::string line;
    std::ifstream in(inPath);
    while (std::getline(in, line)) {
        std::istringstream split(line);
        std::string name, delimiter, pattern;
        if (split >> name >> delimiter >> pattern) {
            SkASSERT(split.eof());
            SkASSERT(name != "");
            SkASSERT(delimiter == "=");
            SkASSERT(pattern != "");
            tokens.push_back(name);
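            // A double-quoted literal is expanded into a chain of kConcat nodes matching its
            // characters in order (e.g. "if" becomes Concat(Char('i'), Char('f'))); anything
            // else is parsed as a regular expression.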
            if (pattern[0] == '"') {
                SkASSERT(pattern.size() > 2 && pattern[pattern.size() - 1] == '"');
                RegexNode node = RegexNode(RegexNode::kChar_Kind, pattern[1]);
                for (size_t i = 2; i < pattern.size() - 1; ++i) {
                    node = RegexNode(RegexNode::kConcat_Kind, node,
                                     RegexNode(RegexNode::kChar_Kind, pattern[i]));
                }
                nfa.addRegex(node);
            } else {
                nfa.addRegex(RegexParser().parse(pattern));
            }
        }
    }
    NFAtoDFA converter(&nfa);
    DFA dfa = converter.convert();
    writeH(dfa, lexer, token, tokens, hPath);
    writeCPP(dfa, lexer, token, (std::string("src/sksl/SkSL") + lexer + ".h").c_str(), cppPath);
}

int main(int argc, const char** argv) {
    if (argc != 6) {
        printf("usage: sksllex <input.lex> <lexername> <tokenname> <output.h> <output.cpp>\n");
        exit(1);
    }
    process(argv[1], argv[2], argv[3], argv[4], argv[5]);
    return 0;
}