/*
** Copyright (C) 2017 Dirk-Jan C. Binnema <djcb@djcbsoftware.nl>
**
** This library is free software; you can redistribute it and/or
** modify it under the terms of the GNU Lesser General Public License
** as published by the Free Software Foundation; either version 2.1
** of the License, or (at your option) any later version.
**
** This library is distributed in the hope that it will be useful,
** but WITHOUT ANY WARRANTY; without even the implied warranty of
** MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
** Lesser General Public License for more details.
**
** You should have received a copy of the GNU Lesser General Public
** License along with this library; if not, write to the Free
** Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
** 02110-1301, USA.
*/

#include <vector>
#include <glib.h>
#include <iostream>
#include <sstream>

#include "mu-tokenizer.hh"

27 struct Case {
28 const char *str;
29 const Mu::Tokens tokens;
30 };
31
32 using CaseVec = std::vector<Case>;
33
34 using namespace Mu;
35 using TT = Token::Type;
36
37 static void
test_cases(const CaseVec & cases)38 test_cases(const CaseVec& cases)
39 {
40 for (const auto& casus : cases ) {
41 const auto tokens = tokenize (casus.str);
42
43 g_assert_cmpuint ((guint)tokens.size(),==,(guint)casus.tokens.size());
44 for (size_t u = 0; u != tokens.size(); ++u) {
45 if (g_test_verbose()) {
46 std::cerr << "case " << u << " " << casus.str << std::endl;
47 std::cerr << "exp: '" << casus.tokens[u] << "'" << std::endl;
48 std::cerr << "got: '" << tokens[u] << "'" << std::endl;
49
50 }
51 g_assert_true (tokens[u] == casus.tokens[u]);
52 }
53 }
54 }
55
56 static void
test_basic()57 test_basic ()
58 {
59 CaseVec cases = {
60 { "", {} },
61
62 { "foo", Tokens{Token{3, TT::Data, "foo"}}},
63
64 { "foo bar cuux", Tokens{Token{3, TT::Data, "foo"},
65 Token{7, TT::Data, "bar"},
66 Token{12, TT::Data, "cuux"}}},
67
68 { "\"foo bar\"", Tokens{ Token{9, TT::Data, "foo bar"}}},
69
70 // ie. ignore missing closing '"'
71 { "\"foo bar", Tokens{ Token{8, TT::Data, "foo bar"}}},
72
73 };
74
75 test_cases (cases);
76 }
77
78 static void
test_specials()79 test_specials ()
80 {
81 CaseVec cases = {
82 { ")*(", Tokens{Token{1, TT::Close, ")"},
83 Token{2, TT::Data, "*"},
84 Token{3, TT::Open, "("}}},
85 { "\")*(\"", Tokens{Token{5, TT::Data, ")*("}}},
86 };
87
88 test_cases (cases);
89 }
90
91
92 static void
test_ops()93 test_ops ()
94 {
95 CaseVec cases = {
96 { "foo and bar oR cuux XoR fnorb",
97 Tokens{Token{3, TT::Data, "foo"},
98 Token{7, TT::And, "and"},
99 Token{11, TT::Data, "bar"},
100 Token{14, TT::Or, "oR"},
101 Token{19, TT::Data, "cuux"},
102 Token{23, TT::Xor, "XoR"},
103 Token{29, TT::Data, "fnorb"}}},
104 { "NOT (aap or mies)",
105 Tokens{Token{3, TT::Not, "NOT"},
106 Token{5, TT::Open, "("},
107 Token{8, TT::Data, "aap"},
108 Token{11, TT::Or, "or"},
109 Token{16, TT::Data, "mies"},
110 Token{17, TT::Close, ")"}}}
111 };
112
113
114 test_cases (cases);
115 }
116
117
118 static void
test_escape()119 test_escape ()
120 {
121 CaseVec cases = {
122 { "foo\"bar\"", Tokens{Token{8, TT::Data, "foobar"}}},
123 { "\"fnorb\"", Tokens{Token{7, TT::Data, "fnorb"}}},
124 { "\\\"fnorb\\\"", Tokens{Token{9, TT::Data, "fnorb"}}},
125 { "foo\\\"bar\\\"", Tokens{Token{10, TT::Data, "foobar"}}}
126 };
127
128 test_cases (cases);
129 }
130
131
132 static void
test_to_string()133 test_to_string ()
134 {
135 std::stringstream ss;
136 for (auto&& t: tokenize ("foo and bar xor not cuux or fnorb"))
137 ss << t << ' ';
138
139 g_assert_true (ss.str() ==
140 "3: <data> [foo] 7: <and> [and] 11: <data> [bar] "
141 "15: <xor> [xor] 19: <not> [not] 24: <data> [cuux] "
142 "27: <or> [or] 33: <data> [fnorb] ");
143 }
144
145
146 int
main(int argc,char * argv[])147 main (int argc, char *argv[])
148 {
149 g_test_init (&argc, &argv, NULL);
150
151 g_test_add_func ("/tokens/basic", test_basic);
152 g_test_add_func ("/tokens/specials", test_specials);
153 g_test_add_func ("/tokens/ops", test_ops);
154 g_test_add_func ("/tokens/escape", test_escape);
155 g_test_add_func ("/tokens/to-string", test_to_string);
156
157 return g_test_run ();
158 }
159