/*
 * Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
 * Use of this file is governed by the BSD 3-clause license that
 * can be found in the LICENSE.txt file in the project root.
 */
package org.antlr.v4.test.runtime.go;

import org.antlr.v4.Tool;
import org.antlr.v4.automata.ATNFactory;
import org.antlr.v4.automata.ATNPrinter;
import org.antlr.v4.automata.LexerATNFactory;
import org.antlr.v4.automata.ParserATNFactory;
import org.antlr.v4.codegen.CodeGenerator;
import org.antlr.v4.runtime.ANTLRInputStream;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.CommonToken;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.IntStream;
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.RuleContext;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenSource;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.WritableToken;
import org.antlr.v4.runtime.atn.ATN;
import org.antlr.v4.runtime.atn.ATNDeserializer;
import org.antlr.v4.runtime.atn.ATNSerializer;
import org.antlr.v4.runtime.atn.ATNState;
import org.antlr.v4.runtime.atn.LexerATNSimulator;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.misc.IntegerList;
import org.antlr.v4.runtime.misc.Interval;
import org.antlr.v4.semantics.SemanticPipeline;
import org.antlr.v4.test.runtime.ErrorQueue;
import org.antlr.v4.test.runtime.RuntimeTestSupport;
import org.antlr.v4.test.runtime.StreamVacuum;
import org.antlr.v4.tool.ANTLRMessage;
import org.antlr.v4.tool.DOTGenerator;
import org.antlr.v4.tool.Grammar;
import org.antlr.v4.tool.GrammarSemanticsMessage;
import org.antlr.v4.tool.LexerGrammar;
import org.antlr.v4.tool.Rule;
import org.stringtemplate.v4.ST;
import org.stringtemplate.v4.STGroup;
import org.stringtemplate.v4.STGroupString;

import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;

import static junit.framework.TestCase.assertEquals;
import static junit.framework.TestCase.assertFalse;
import static junit.framework.TestCase.assertNotNull;
import static junit.framework.TestCase.assertTrue;
import static org.antlr.v4.test.runtime.BaseRuntimeTest.antlrOnString;
import static org.antlr.v4.test.runtime.BaseRuntimeTest.writeFile;
import static org.junit.Assert.assertArrayEquals;

public class BaseGoTest implements RuntimeTestSupport {
	public File overall_tmpdir = null;
	public File tmpdir = null; // where the generated parser package is written; lives inside overall_tmpdir
	private static File tmpGopath = null;
	private static final String GO_RUNTIME_IMPORT_PATH = "github.com/antlr/antlr4/runtime/Go/antlr"; // TODO: Change this before merging with upstream

	/**
	 * If an error occurs during parser execution, stderr is stored here; we
	 * cannot return both stdout and stderr. This does not capture errors from
	 * running ANTLR itself.
	 */
	protected String stderrDuringParse;

	/** Errors found while running antlr */
	protected StringBuilder antlrToolErrors;

	/**
	 * Copies all files from the Go runtime to a temporary folder that is inside a valid GOPATH project structure.
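	 * <p>
	 * The runtime sources end up under
	 * {@code <java.io.tmpdir>/antlr-goruntime-tmpgopath-<hex timestamp>/src/github.com/antlr/antlr4/runtime/Go/antlr}
	 * and are compiled once via {@link #cacheGoRuntime(File)} so that later
	 * {@code go run} invocations can reuse the prebuilt package.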
	 */
	public static void groupSetUp() throws Exception {
		tmpGopath = new File(System.getProperty("java.io.tmpdir"), "antlr-goruntime-tmpgopath-"
			+ Long.toHexString(System.currentTimeMillis()));

		ArrayList<String> pathsegments = new ArrayList<String>();
		pathsegments.add("src");
		pathsegments.addAll(Arrays.asList(GO_RUNTIME_IMPORT_PATH.split("/")));

		File tmpPackageDir = tmpGopath;
		for (String pathsegment : pathsegments) {
			tmpPackageDir = new File(tmpPackageDir, pathsegment);
		}
		if (!tmpPackageDir.mkdirs()) {
			throw new Exception("Could not create temp go runtime package dirs!");
		}

		File[] runtimeFiles = locateRuntime().listFiles();
		if (runtimeFiles == null) {
			throw new Exception("Go runtime file list is empty.");
		}
		for (File runtimeFile : runtimeFiles) {
			File dest = new File(tmpPackageDir, runtimeFile.getName());
			copyFile(runtimeFile, dest);
		}

		cacheGoRuntime(tmpPackageDir);
	}

	@Override
	public void testTearDown() throws Exception {
	}

	@Override
	public String getTmpDir() {
		return tmpdir.getPath();
	}

	@Override
	public String getStdout() {
		return null;
	}

	@Override
	public String getParseErrors() {
		return stderrDuringParse;
	}

	@Override
	public String getANTLRToolErrors() {
		if ( antlrToolErrors.length()==0 ) {
			return null;
		}
		return antlrToolErrors.toString();
	}

	public static void groupTearDown() throws Exception {
		eraseDirectory(tmpGopath);
	}

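	/**
	 * Runs {@code go install -x} on the copied runtime sources so the compiled
	 * package is cached under the temporary GOPATH and later {@code go run}
	 * invocations do not have to rebuild it for every test.
	 */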
	private static void cacheGoRuntime(File tmpPackageDir) throws Exception {
		String goExecutable = locateGo();
		ProcessBuilder pb = new ProcessBuilder(goExecutable, "install", "-x");
		pb.directory(tmpPackageDir);
		pb.environment().put("GOPATH", tmpGopath.getPath());
		pb.redirectErrorStream(true);
		Process process = pb.start();
		StreamVacuum sucker = new StreamVacuum(process.getInputStream());
		sucker.start();
		int exit = process.waitFor();
		sucker.join();
		if (exit != 0) {
			throw new Exception("Non-zero exit while caching go runtime, output: " + sucker.toString());
		}
	}

	private static void copyFile(File source, File dest) throws IOException {
		// try-with-resources ensures both streams are closed even if the copy fails
		try (InputStream is = new FileInputStream(source);
		     OutputStream os = new FileOutputStream(dest)) {
			byte[] buf = new byte[4 << 10];
			int l;
			while ((l = is.read(buf)) > -1) {
				os.write(buf, 0, l);
			}
		}
	}

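	/**
	 * Creates a fresh output directory for each test. The location can be
	 * overridden with the "antlr-go-test-dir" system property; otherwise a
	 * per-thread directory is created under java.io.tmpdir. Generated parser
	 * code is written to the "parser" subdirectory (see {@link #tmpdir}).
	 */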
	public void testSetUp() throws Exception {
		// new output dir for each test
		String prop = System.getProperty("antlr-go-test-dir");
		if (prop != null && prop.length() > 0) {
			overall_tmpdir = new File(prop);
		}
		else {
			String threadName = Thread.currentThread().getName();
			overall_tmpdir = new File(System.getProperty("java.io.tmpdir"),
			                          getClass().getSimpleName()+"-"+threadName+"-"+System.currentTimeMillis());
		}

		if ( overall_tmpdir.exists())
			this.eraseDirectory(overall_tmpdir);

		tmpdir = new File(overall_tmpdir, "parser");

		if ( tmpdir.exists()) {
			this.eraseDirectory(tmpdir);
		}
		antlrToolErrors = new StringBuilder();
	}

	protected org.antlr.v4.Tool newTool(String[] args) {
		return new Tool(args);
	}

	protected Tool newTool() {
		return new Tool(new String[]{"-o", tmpdir.getPath()});
	}

	protected ATN createATN(Grammar g, boolean useSerializer) {
		if (g.atn == null) {
			semanticProcess(g);
			assertEquals(0, g.tool.getNumErrors());

			ParserATNFactory f;
			if (g.isLexer()) {
				f = new LexerATNFactory((LexerGrammar) g);
			}
			else {
				f = new ParserATNFactory(g);
			}

			g.atn = f.createATN();
			assertEquals(0, g.tool.getNumErrors());
		}

		ATN atn = g.atn;
		if (useSerializer) {
			char[] serialized = ATNSerializer.getSerializedAsChars(atn);
			return new ATNDeserializer().deserialize(serialized);
		}

		return atn;
	}

	protected void semanticProcess(Grammar g) {
		if (g.ast != null && !g.ast.hasErrors) {
			System.out.println(g.ast.toStringTree());
			Tool antlr = new Tool();
			SemanticPipeline sem = new SemanticPipeline(g);
			sem.process();
			if (g.getImportedGrammars() != null) { // process imported grammars (if any)
				for (Grammar imp : g.getImportedGrammars()) {
					antlr.processNonCombinedGrammar(imp, false);
				}
			}
		}
	}

	IntegerList getTypesFromString(Grammar g, String expecting) {
		IntegerList expectingTokenTypes = new IntegerList();
		if (expecting != null && !expecting.trim().isEmpty()) {
			for (String tname : expecting.replace(" ", "").split(",")) {
				int ttype = g.getTokenType(tname);
				expectingTokenTypes.add(ttype);
			}
		}
		return expectingTokenTypes;
	}

	public IntegerList getTokenTypesViaATN(String input,
	                                       LexerATNSimulator lexerATN) {
		ANTLRInputStream in = new ANTLRInputStream(input);
		IntegerList tokenTypes = new IntegerList();
		int ttype;
		do {
			ttype = lexerATN.match(in, Lexer.DEFAULT_MODE);
			tokenTypes.add(ttype);
		} while (ttype != Token.EOF);
		return tokenTypes;
	}

	public List<String> getTokenTypes(LexerGrammar lg, ATN atn, CharStream input) {
		LexerATNSimulator interp = new LexerATNSimulator(atn,
		                                                 new DFA[] { new DFA(
			                                                 atn.modeToStartState.get(Lexer.DEFAULT_MODE)) }, null);
		List<String> tokenTypes = new ArrayList<String>();
		int ttype;
		boolean hitEOF = false;
		do {
			if (hitEOF) {
				tokenTypes.add("EOF");
				break;
			}
			int t = input.LA(1);
			ttype = interp.match(input, Lexer.DEFAULT_MODE);
			if (ttype == Token.EOF) {
				tokenTypes.add("EOF");
			}
			else {
				tokenTypes.add(lg.typeToTokenList.get(ttype));
			}

			if (t == IntStream.EOF) {
				hitEOF = true;
			}
		} while (ttype != Token.EOF);
		return tokenTypes;
	}

	protected String execLexer(String grammarFileName, String grammarStr,
	                           String lexerName, String input) {
		return execLexer(grammarFileName, grammarStr, lexerName, input, false);
	}

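	/**
	 * Runs the ANTLR tool on grammarStr to generate the lexer, writes the test
	 * input and a Test.go driver into the temp directory, runs the driver with
	 * {@code go run} and returns its stdout (the token dump, plus the lexer DFA
	 * when showDFA is set). A typical call might look like this (grammar and
	 * file names are illustrative only):
	 * <pre>
	 * String found = execLexer("L.g4", lexerGrammarStr, "L", "abc", false);
	 * </pre>
	 */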
	@Override
	public String execLexer(String grammarFileName, String grammarStr,
	                         String lexerName, String input, boolean showDFA) {
		boolean success = rawGenerateAndBuildRecognizer(grammarFileName,
		                                                grammarStr, null, lexerName, "-no-listener");
		assertTrue(success);
		writeFile(overall_tmpdir.toString(), "input", input);
		writeLexerTestFile(lexerName, showDFA);
		String output = execModule("Test.go");
		return output;
	}
//
//	public String execParser(String grammarFileName, String grammarStr,
//	                         String parserName, String lexerName, String listenerName,
//	                         String visitorName, String startRuleName, String input,
//	                         boolean debug)
//	{
//		boolean success = rawGenerateAndBuildRecognizer(grammarFileName,
//		                                                grammarStr, parserName, lexerName, "-visitor");
//		assertTrue(success);
//		writeFile(overall_tmpdir, "input", input);
//		rawBuildRecognizerTestFile(parserName, lexerName, listenerName,
//		                           visitorName, startRuleName, debug);
//		return execRecognizer();
//	}

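	/**
	 * Runs the ANTLR tool on grammarStr to generate the lexer/parser pair (with
	 * -visitor), writes the input file and a Test.go driver, then runs the
	 * driver with {@code go run} and returns its stdout. A typical call from a
	 * runtime test might look like this (grammar and rule names are
	 * illustrative only):
	 * <pre>
	 * String found = execParser("T.g4", grammarStr, "TParser", "TLexer",
	 *                           "TListener", "TVisitor", "s", "abc", false);
	 * </pre>
	 */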
	@Override
	public String execParser(String grammarFileName, String grammarStr,
	                         String parserName, String lexerName, String listenerName,
	                         String visitorName, String startRuleName, String input,
	                         boolean showDiagnosticErrors)
	{
		boolean success = rawGenerateAndBuildRecognizer(grammarFileName,
		                                                grammarStr, parserName, lexerName, "-visitor");
		assertTrue(success);
		writeFile(overall_tmpdir.toString(), "input", input);
		rawBuildRecognizerTestFile(parserName, lexerName, listenerName,
		                           visitorName, startRuleName, showDiagnosticErrors);
		return execRecognizer();
	}

	/** Return true if all is well */
	protected boolean rawGenerateAndBuildRecognizer(String grammarFileName,
	                                                String grammarStr, String parserName, String lexerName,
	                                                String... extraOptions) {
		return rawGenerateAndBuildRecognizer(grammarFileName, grammarStr,
		                                     parserName, lexerName, false, extraOptions);
	}

	/** Return true if all is well */
	protected boolean rawGenerateAndBuildRecognizer(String grammarFileName,
	                                                String grammarStr, String parserName, String lexerName,
	                                                boolean defaultListener, String... extraOptions) {
		ErrorQueue equeue = antlrOnString(getTmpDir(), "Go", grammarFileName, grammarStr,
		                                  defaultListener, extraOptions);
		if (!equeue.errors.isEmpty()) {
			return false;
		}
		return true;
	}

	protected void rawBuildRecognizerTestFile(String parserName,
	                                          String lexerName, String listenerName, String visitorName,
	                                          String parserStartRuleName, boolean debug) {
		this.stderrDuringParse = null;
		if (parserName == null) {
			writeLexerTestFile(lexerName, false);
		}
		else {
			writeParserTestFile(parserName, lexerName, listenerName,
			                    visitorName, parserStartRuleName, debug);
		}
	}

	public String execRecognizer() {
		return execModule("Test.go");
	}

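	/**
	 * Runs the generated Go program via {@code go run Test.go input}, with
	 * GOPATH pointing at the temporary runtime prepared in groupSetUp().
	 * Returns stdout (or null if empty) and records any stderr output in
	 * stderrDuringParse.
	 */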
	public String execModule(String fileName) {
		String goExecutable = locateGo();
		String modulePath = new File(overall_tmpdir, fileName).getAbsolutePath();
		String inputPath = new File(overall_tmpdir, "input").getAbsolutePath();
		try {
			ProcessBuilder builder = new ProcessBuilder(goExecutable, "run", modulePath, inputPath);
			builder.environment().put("GOPATH", tmpGopath.getPath());
			builder.directory(overall_tmpdir);
			Process process = builder.start();
			StreamVacuum stdoutVacuum = new StreamVacuum(process.getInputStream());
			StreamVacuum stderrVacuum = new StreamVacuum(process.getErrorStream());
			stdoutVacuum.start();
			stderrVacuum.start();
			process.waitFor();
			stdoutVacuum.join();
			stderrVacuum.join();
			String output = stdoutVacuum.toString();
			if ( output.length()==0 ) {
				output = null;
			}
			if (stderrVacuum.toString().length() > 0) {
				this.stderrDuringParse = stderrVacuum.toString();
			}
			return output;
		}
		catch (Exception e) {
			System.err.println("can't exec recognizer");
			e.printStackTrace(System.err);
		}
		return null;
	}

	private static String locateTool(String tool) {
		ArrayList<String> paths = new ArrayList<String>(); // default cap is about right

		// GOROOT should have priority if set
		String goroot = System.getenv("GOROOT");
		if (goroot != null) {
			paths.add(goroot + File.separatorChar + "bin");
		}

		String pathEnv = System.getenv("PATH");
		if (pathEnv != null) {
			paths.addAll(Arrays.asList(pathEnv.split(File.pathSeparator)));
		}

		// OS-specific default locations of the binary distribution, as a last resort
		paths.add("/usr/local/go/bin");
		paths.add("c:\\Go\\bin");

		for (String path : paths) {
			File candidate = new File(new File(path), tool);
			if (candidate.exists()) {
				return candidate.getPath();
			}
			candidate = new File(new File(path), tool+".exe");
			if (candidate.exists()) {
				return candidate.getPath();
			}
		}
		return null;
	}

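	/**
	 * Resolves the go executable: the "antlr-go" system property wins,
	 * otherwise GOROOT/bin, PATH and the default install locations are
	 * searched via locateTool().
	 */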
	private static String locateGo() {
		String propName = "antlr-go";
		String prop = System.getProperty(propName);
		if (prop == null || prop.length() == 0) {
			prop = locateTool("go");
		}
		if (prop == null) {
			throw new RuntimeException("Could not locate the go executable; set the " + propName + " system property or add go to the PATH");
		}
		return prop;
	}

	private static File locateRuntime() {
		final ClassLoader loader = Thread.currentThread().getContextClassLoader();
		final URL runtimeSrc = loader.getResource("Go");
		if ( runtimeSrc==null ) {
			throw new RuntimeException("Cannot find Go ANTLR runtime");
		}
		File runtimeDir = new File(runtimeSrc.getPath(), "antlr");
		if (!runtimeDir.exists()) {
			throw new RuntimeException("Cannot find Go ANTLR runtime");
		}
		return runtimeDir;
	}

	// void ambig(List<Message> msgs, int[] expectedAmbigAlts, String
	// expectedAmbigInput)
	// throws Exception
	// {
	// ambig(msgs, 0, expectedAmbigAlts, expectedAmbigInput);
	// }

	// void ambig(List<Message> msgs, int i, int[] expectedAmbigAlts, String
	// expectedAmbigInput)
	// throws Exception
	// {
	// List<Message> amsgs = getMessagesOfType(msgs, AmbiguityMessage.class);
	// AmbiguityMessage a = (AmbiguityMessage)amsgs.get(i);
	// if ( a==null ) assertNull(expectedAmbigAlts);
	// else {
	// assertEquals(a.conflictingAlts.toString(),
	// Arrays.toString(expectedAmbigAlts));
	// }
	// assertEquals(expectedAmbigInput, a.input);
	// }

	// void unreachable(List<Message> msgs, int[] expectedUnreachableAlts)
	// throws Exception
	// {
	// unreachable(msgs, 0, expectedUnreachableAlts);
	// }

	// void unreachable(List<Message> msgs, int i, int[]
	// expectedUnreachableAlts)
	// throws Exception
	// {
	// List<Message> amsgs = getMessagesOfType(msgs,
	// UnreachableAltsMessage.class);
	// UnreachableAltsMessage u = (UnreachableAltsMessage)amsgs.get(i);
	// if ( u==null ) assertNull(expectedUnreachableAlts);
	// else {
	// assertEquals(u.conflictingAlts.toString(),
	// Arrays.toString(expectedUnreachableAlts));
	// }
	// }

	List<ANTLRMessage> getMessagesOfType(List<ANTLRMessage> msgs,
	                                     Class<? extends ANTLRMessage> c) {
		List<ANTLRMessage> filtered = new ArrayList<ANTLRMessage>();
		for (ANTLRMessage m : msgs) {
			if (m.getClass() == c)
				filtered.add(m);
		}
		return filtered;
	}

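	/** Builds the ATN for the grammar, prints the named rule's start state as
	 *  DOT for debugging, and compares the textual serialization (ATNPrinter)
	 *  of that rule's ATN against the expected string. */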
	void checkRuleATN(Grammar g, String ruleName, String expecting) {
		ParserATNFactory f = new ParserATNFactory(g);
		ATN atn = f.createATN();

		DOTGenerator dot = new DOTGenerator(g);
		System.out
			.println(dot.getDOT(atn.ruleToStartState[g.getRule(ruleName).index]));

		Rule r = g.getRule(ruleName);
		ATNState startState = atn.ruleToStartState[r.index];
		ATNPrinter serializer = new ATNPrinter(g, startState);
		String result = serializer.asString();

		// System.out.print(result);
		assertEquals(expecting, result);
	}

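	/**
	 * Renders the given grammar template with the action text inserted,
	 * generates the recognizer and asserts that the generated code between the
	 * {@code #<actionName>#} and {@code #end-<actionName>#} markers equals the
	 * expected string.
	 */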
	public void testActions(String templates, String actionName, String action,
	                        String expected) throws org.antlr.runtime.RecognitionException {
		int lp = templates.indexOf('(');
		String name = templates.substring(0, lp);
		STGroup group = new STGroupString(templates);
		ST st = group.getInstanceOf(name);
		st.add(actionName, action);
		String grammar = st.render();
		ErrorQueue equeue = new ErrorQueue();
		Grammar g = new Grammar(grammar, equeue);
		if (g.ast != null && !g.ast.hasErrors) {
			SemanticPipeline sem = new SemanticPipeline(g);
			sem.process();

			ATNFactory factory = new ParserATNFactory(g);
			if (g.isLexer())
				factory = new LexerATNFactory((LexerGrammar) g);
			g.atn = factory.createATN();

			CodeGenerator gen = new CodeGenerator(g);
			ST outputFileST = gen.generateParser();
			String output = outputFileST.render();
			// System.out.println(output);
			String b = "#" + actionName + "#";
			int start = output.indexOf(b);
			String e = "#end-" + actionName + "#";
			int end = output.indexOf(e);
			String snippet = output.substring(start + b.length(), end);
			assertEquals(expected, snippet);
		}
		if (equeue.size() > 0) {
			System.err.println(equeue.toString());
		}
	}

	protected void checkGrammarSemanticsError(ErrorQueue equeue,
	                                          GrammarSemanticsMessage expectedMessage) throws Exception {
		ANTLRMessage foundMsg = null;
		for (int i = 0; i < equeue.errors.size(); i++) {
			ANTLRMessage m = equeue.errors.get(i);
			if (m.getErrorType() == expectedMessage.getErrorType()) {
				foundMsg = m;
			}
		}
		assertNotNull("no error; " + expectedMessage.getErrorType()
			              + " expected", foundMsg);
		assertTrue("error is not a GrammarSemanticsMessage",
		           foundMsg instanceof GrammarSemanticsMessage);
		assertEquals(Arrays.toString(expectedMessage.getArgs()),
		             Arrays.toString(foundMsg.getArgs()));
		if (equeue.size() != 1) {
			System.err.println(equeue);
		}
	}

	protected void checkGrammarSemanticsWarning(ErrorQueue equeue,
	                                            GrammarSemanticsMessage expectedMessage) throws Exception {
		ANTLRMessage foundMsg = null;
		for (int i = 0; i < equeue.warnings.size(); i++) {
			ANTLRMessage m = equeue.warnings.get(i);
			if (m.getErrorType() == expectedMessage.getErrorType()) {
				foundMsg = m;
			}
		}
		assertNotNull("no warning; " + expectedMessage.getErrorType()
			              + " expected", foundMsg);
		assertTrue("warning is not a GrammarSemanticsMessage",
		           foundMsg instanceof GrammarSemanticsMessage);
		assertEquals(Arrays.toString(expectedMessage.getArgs()),
		             Arrays.toString(foundMsg.getArgs()));
		if (equeue.size() != 1) {
			System.err.println(equeue);
		}
	}

	protected void checkError(ErrorQueue equeue, ANTLRMessage expectedMessage)
		throws Exception {
		// System.out.println("errors="+equeue);
		ANTLRMessage foundMsg = null;
		for (int i = 0; i < equeue.errors.size(); i++) {
			ANTLRMessage m = equeue.errors.get(i);
			if (m.getErrorType() == expectedMessage.getErrorType()) {
				foundMsg = m;
			}
		}
		assertTrue("no error; " + expectedMessage.getErrorType() + " expected",
		           !equeue.errors.isEmpty());
		assertTrue("too many errors; " + equeue.errors,
		           equeue.errors.size() <= 1);
		assertNotNull(
			"couldn't find expected error: "
				+ expectedMessage.getErrorType(), foundMsg);
		/*
		 * assertTrue("error is not a GrammarSemanticsMessage", foundMsg
		 * instanceof GrammarSemanticsMessage);
		 */
		assertArrayEquals(expectedMessage.getArgs(), foundMsg.getArgs());
	}

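	/** A CommonTokenStream that moves token types registered via
	 *  setTokenTypeChannel() onto the hidden channel as tokens are synced in. */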
	public static class FilteringTokenStream extends CommonTokenStream {
		public FilteringTokenStream(TokenSource src) {
			super(src);
		}

		Set<Integer> hide = new HashSet<Integer>();

		@Override
		protected boolean sync(int i) {
			if (!super.sync(i)) {
				return false;
			}

			Token t = get(i);
			if (hide.contains(t.getType())) {
				((WritableToken) t).setChannel(Token.HIDDEN_CHANNEL);
			}

			return true;
		}

		public void setTokenTypeChannel(int ttype, int channel) {
			hide.add(ttype);
		}
	}

	protected void mkdir(File dir) {
		dir.mkdirs();
	}

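	/**
	 * Writes a Test.go main program that feeds the input file to the generated
	 * lexer/parser, walks the resulting parse tree with a TreeShapeListener and
	 * panics if the tree structure is inconsistent. The start rule name is
	 * capitalized so it matches the exported Go method on the parser.
	 */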
	protected void writeParserTestFile(String parserName, String lexerName,
	                                   String listenerName, String visitorName,
	                                   String parserStartRuleName, boolean debug) {
		ST outputFileST = new ST(
			"package main\n" +
				"import (\n"
				+"	\"github.com/antlr/antlr4/runtime/Go/antlr\"\n"
				+"	\"./parser\"\n"
				+"	\"fmt\"\n"
				+"	\"os\"\n"
				+")\n"
				+ "\n"
				+ "type TreeShapeListener struct {\n"
				+ "	*parser.Base<listenerName>\n"
				+ "}\n"
				+ "\n"
				+ "func NewTreeShapeListener() *TreeShapeListener {\n"
				+ "	return new(TreeShapeListener)\n"
				+ "}\n"
				+ "\n"
				+ "func (this *TreeShapeListener) EnterEveryRule(ctx antlr.ParserRuleContext) {\n"
				+ "	for i := 0; i\\<ctx.GetChildCount(); i++ {\n"
				+ "		child := ctx.GetChild(i)\n"
				+ "		parentR,ok := child.GetParent().(antlr.RuleNode)\n"
				+ "		if !ok || parentR.GetBaseRuleContext() != ctx.GetBaseRuleContext() {\n"
				+ "			panic(\"Invalid parse tree shape detected.\")\n"
				+ "		}\n"
				+ "	}\n"
				+ "}\n"
				+ "\n"
				+ "func main() {\n"
				+ "	input, err := antlr.NewFileStream(os.Args[1])\n"
				+ "     if err != nil {\n"
				+ "     	fmt.Printf(\"Failed to find file: %v\", err)\n"
				+ "     	return\n"
				+ "     }\n"
				+ "	lexer := parser.New<lexerName>(input)\n"
				+ "	stream := antlr.NewCommonTokenStream(lexer,0)\n"
				+ "<createParser>"
				+ "	p.BuildParseTrees = true\n"
				+ "	tree := p.<parserStartRuleName>()\n"
				+ "	antlr.ParseTreeWalkerDefault.Walk(NewTreeShapeListener(), tree)\n"
				+ "}\n");

		ST createParserST = new ST(
			"	p := parser.New<parserName>(stream)\n");
		if (debug) {
			createParserST = new ST(
				"	p := parser.New<parserName>(stream)\n"
					+ "	p.AddErrorListener(antlr.NewDiagnosticErrorListener(true))\n");
		}
		outputFileST.add("createParser", createParserST);
		outputFileST.add("parserName", parserName);
		outputFileST.add("lexerName", lexerName);
		outputFileST.add("listenerName", listenerName);
		outputFileST.add("visitorName", visitorName);
		outputFileST.add("parserStartRuleName", parserStartRuleName.substring(0, 1).toUpperCase() + parserStartRuleName.substring(1) );
		writeFile(overall_tmpdir.toString(), "Test.go", outputFileST.render());
	}

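	/**
	 * Writes a Test.go main program that tokenizes the input file with the
	 * generated lexer, prints every token and, when showDFA is set, dumps the
	 * DEFAULT_MODE DFA of the lexer interpreter.
	 */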
	protected void writeLexerTestFile(String lexerName, boolean showDFA) {
		ST outputFileST = new ST(
			"package main\n" +
				"import (\n"
				+ "	\"github.com/antlr/antlr4/runtime/Go/antlr\"\n"
				+ "	\"./parser\"\n"
				+ "	\"os\"\n"
				+ "	\"fmt\"\n"
				+ ")\n"
				+ "\n"
				+ "func main() {\n"
				+ "	input, err := antlr.NewFileStream(os.Args[1])\n"
				+ "     if err != nil {\n"
				+ "     	fmt.Printf(\"Failed to find file: %v\", err)\n"
				+ "     	return\n"
				+ "     }\n"
				+ "	lexer := parser.New<lexerName>(input)\n"
				+ "	stream := antlr.NewCommonTokenStream(lexer,0)\n"
				+ "	stream.Fill()\n"
				+ "	for _, t := range stream.GetAllTokens() {\n"
				+ "		fmt.Println(t)\n"
				+ "	}\n"
				+ (showDFA ? "fmt.Print(lexer.GetInterpreter().DecisionToDFA()[antlr.LexerDefaultMode].ToLexerString())\n"
				: "")
				+ "}\n"
				+ "\n");
		outputFileST.add("lexerName", lexerName);
		writeFile(overall_tmpdir.toString(), "Test.go", outputFileST.render());
	}

	public void writeRecognizer(String parserName, String lexerName,
	                            String listenerName, String visitorName,
	                            String parserStartRuleName, boolean debug) {
		if (parserName == null) {
			writeLexerTestFile(lexerName, debug);
		}
		else {
			writeParserTestFile(parserName, lexerName, listenerName,
			                    visitorName, parserStartRuleName, debug);
		}
	}

	protected void eraseFilesEndingWith(final String filesEndingWith) {
		File[] files = overall_tmpdir.listFiles(new FileFilter() {
			@Override
			public boolean accept(File pathname) {
				return pathname.getName().endsWith(filesEndingWith);
			}
		});
		for (File file : files) {
			file.delete();
		}
	}

	protected static void eraseDirectory(File dir) {
		File[] files = dir.listFiles();
		if (files != null) {
			for (File file : files) {
				if (file.isDirectory()) {
					eraseDirectory(file);
				}
				else {
					file.delete();
				}
			}
		}
		dir.delete();
	}

	public void eraseTempDir() {
		boolean doErase = true;
		String propName = "antlr-go-erase-test-dir";
		String prop = System.getProperty(propName);
		if (prop != null && prop.length() > 0)
			doErase = Boolean.parseBoolean(prop);
		if (doErase) {
			if ( overall_tmpdir.exists()) {
				eraseDirectory(overall_tmpdir);
			}
		}
	}

	public String getFirstLineOfException() {
		if (this.stderrDuringParse == null) {
			return null;
		}
		String[] lines = this.stderrDuringParse.split("\n");
		String prefix = "Exception in thread \"main\" ";
		return lines[0].substring(prefix.length(), lines[0].length());
	}

	/**
	 * When looking at a result set that consists of a Map/HashTable we cannot
	 * rely on the output order, as the hashing algorithm or other aspects of
	 * the implementation may be different on different JDKs or platforms. Hence
	 * we take the Map, convert the keys to a List, sort them and Stringify the
	 * Map, which is a bit of a hack, but guarantees that we get the same order
	 * on all systems. We assume that the keys are strings.
	 *
	 * @param m
	 *            The Map that contains keys we wish to return in sorted order
	 * @return A string that represents all the keys in sorted order.
	 */
	public <K, V> String sortMapToString(Map<K, V> m) {
		// Pass in crap, and get nothing back
		//
		if (m == null) {
			return null;
		}

		System.out.println("Map toString looks like: " + m.toString());

		// Sort the keys in the Map
		//
		TreeMap<K, V> nset = new TreeMap<K, V>(m);

		System.out.println("Tree map looks like: " + nset.toString());
		return nset.toString();
	}

	public List<String> realElements(List<String> elements) {
		return elements.subList(Token.MIN_USER_TOKEN_TYPE, elements.size());
	}

	public void assertNotNullOrEmpty(String message, String text) {
		assertNotNull(message, text);
		assertFalse(message, text.isEmpty());
	}

	public void assertNotNullOrEmpty(String text) {
		assertNotNull(text);
		assertFalse(text.isEmpty());
	}

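	/** A minimal TokenStream backed only by a list of token types; useful for
	 *  driving ATN/parser simulations in tests without a real lexer. */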
	public static class IntTokenStream implements TokenStream {
		IntegerList types;
		int p = 0;

		public IntTokenStream(IntegerList types) {
			this.types = types;
		}

		@Override
		public void consume() {
			p++;
		}

		@Override
		public int LA(int i) {
			return LT(i).getType();
		}

		@Override
		public int mark() {
			return index();
		}

		@Override
		public int index() {
			return p;
		}

		@Override
		public void release(int marker) {
			seek(marker);
		}

		@Override
		public void seek(int index) {
			p = index;
		}

		@Override
		public int size() {
			return types.size();
		}

		@Override
		public String getSourceName() {
			return null;
		}

		@Override
		public Token LT(int i) {
			CommonToken t;
			int rawIndex = p + i - 1;
			if (rawIndex >= types.size())
				t = new CommonToken(Token.EOF);
			else
				t = new CommonToken(types.get(rawIndex));
			t.setTokenIndex(rawIndex);
			return t;
		}

		@Override
		public Token get(int i) {
			return new org.antlr.v4.runtime.CommonToken(types.get(i));
		}

		@Override
		public TokenSource getTokenSource() {
			return null;
		}

		@Override
		public String getText() {
			throw new UnsupportedOperationException("can't give strings");
		}

		@Override
		public String getText(Interval interval) {
			throw new UnsupportedOperationException("can't give strings");
		}

		@Override
		public String getText(RuleContext ctx) {
			throw new UnsupportedOperationException("can't give strings");
		}

		@Override
		public String getText(Token start, Token stop) {
			throw new UnsupportedOperationException("can't give strings");
		}
	}

	/** Sort a list */
	public <T extends Comparable<? super T>> List<T> sort(List<T> data) {
		List<T> dup = new ArrayList<T>();
		dup.addAll(data);
		Collections.sort(dup);
		return dup;
	}

	/** Return map sorted by key */
	public <K extends Comparable<? super K>, V> LinkedHashMap<K, V> sort(
		Map<K, V> data) {
		LinkedHashMap<K, V> dup = new LinkedHashMap<K, V>();
		List<K> keys = new ArrayList<K>();
		keys.addAll(data.keySet());
		Collections.sort(keys);
		for (K k : keys) {
			dup.put(k, data.get(k));
		}
		return dup;
	}
}