1 /*
2  * Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
3  * Use of this file is governed by the BSD 3-clause license that
4  * can be found in the LICENSE.txt file in the project root.
5  */
6 
7 package org.antlr.v4.test.tool;
8 
9 import org.antlr.v4.runtime.ANTLRFileStream;
10 import org.antlr.v4.runtime.ANTLRInputStream;
11 import org.antlr.v4.runtime.BailErrorStrategy;
12 import org.antlr.v4.runtime.BaseErrorListener;
13 import org.antlr.v4.runtime.CharStream;
14 import org.antlr.v4.runtime.CommonTokenStream;
15 import org.antlr.v4.runtime.DefaultErrorStrategy;
16 import org.antlr.v4.runtime.DiagnosticErrorListener;
17 import org.antlr.v4.runtime.Lexer;
18 import org.antlr.v4.runtime.Parser;
19 import org.antlr.v4.runtime.ParserInterpreter;
20 import org.antlr.v4.runtime.ParserRuleContext;
21 import org.antlr.v4.runtime.RecognitionException;
22 import org.antlr.v4.runtime.Recognizer;
23 import org.antlr.v4.runtime.Token;
24 import org.antlr.v4.runtime.TokenSource;
25 import org.antlr.v4.runtime.TokenStream;
26 import org.antlr.v4.runtime.atn.ATN;
27 import org.antlr.v4.runtime.atn.ATNConfig;
28 import org.antlr.v4.runtime.atn.ATNConfigSet;
29 import org.antlr.v4.runtime.atn.LexerATNSimulator;
30 import org.antlr.v4.runtime.atn.ParserATNSimulator;
31 import org.antlr.v4.runtime.atn.PredictionContextCache;
32 import org.antlr.v4.runtime.atn.PredictionMode;
33 import org.antlr.v4.runtime.dfa.DFA;
34 import org.antlr.v4.runtime.dfa.DFAState;
35 import org.antlr.v4.runtime.misc.Interval;
36 import org.antlr.v4.runtime.misc.MurmurHash;
37 import org.antlr.v4.runtime.misc.ParseCancellationException;
38 import org.antlr.v4.runtime.tree.ErrorNode;
39 import org.antlr.v4.runtime.tree.ParseTree;
40 import org.antlr.v4.runtime.tree.ParseTreeListener;
41 import org.antlr.v4.runtime.tree.ParseTreeWalker;
42 import org.antlr.v4.runtime.tree.TerminalNode;
43 import org.antlr.v4.test.runtime.BaseRuntimeTest;
44 import org.antlr.v4.test.runtime.ErrorQueue;
45 import org.junit.Assert;
46 import org.junit.Before;
47 import org.junit.Test;
48 
49 import java.io.File;
50 import java.io.FilenameFilter;
51 import java.io.IOException;
52 import java.lang.ref.Reference;
53 import java.lang.ref.SoftReference;
54 import java.lang.ref.WeakReference;
55 import java.lang.reflect.Constructor;
56 import java.lang.reflect.InvocationTargetException;
57 import java.lang.reflect.Method;
58 import java.net.URL;
59 import java.net.URLClassLoader;
60 import java.util.ArrayList;
61 import java.util.Arrays;
62 import java.util.BitSet;
63 import java.util.Collection;
64 import java.util.Collections;
65 import java.util.HashSet;
66 import java.util.List;
67 import java.util.Random;
68 import java.util.Set;
69 import java.util.concurrent.Callable;
70 import java.util.concurrent.ExecutionException;
71 import java.util.concurrent.ExecutorService;
72 import java.util.concurrent.Executors;
73 import java.util.concurrent.Future;
74 import java.util.concurrent.ThreadFactory;
75 import java.util.concurrent.TimeUnit;
76 import java.util.concurrent.atomic.AtomicInteger;
77 import java.util.concurrent.atomic.AtomicIntegerArray;
78 import java.util.logging.Level;
79 import java.util.logging.Logger;
80 
81 import static org.antlr.v4.test.runtime.BaseRuntimeTest.writeFile;
82 import static org.hamcrest.CoreMatchers.instanceOf;
83 import static org.junit.Assert.assertThat;
84 import static org.junit.Assert.assertTrue;
85 
86 @SuppressWarnings("unused")
87 public class TestPerformance extends BaseJavaToolTest {
88     /**
89      * Parse all java files under this package within the JDK_SOURCE_ROOT
90      * (environment variable or property defined on the Java command line).
91      */
92     private static final String TOP_PACKAGE = "java.lang";
93     /**
94      * {@code true} to load java files from sub-packages of
95      * {@link #TOP_PACKAGE}.
96      */
97     private static final boolean RECURSIVE = true;
98 	/**
99 	 * {@code true} to read all source files from disk into memory before
100 	 * starting the parse. The default value is {@code true} to help prevent
101 	 * drive speed from affecting the performance results. This value may be set
102 	 * to {@code false} to support parsing large input sets which would not
103 	 * otherwise fit into memory.
104 	 */
105 	private static final boolean PRELOAD_SOURCES = true;
106 	/**
107 	 * The encoding to use when reading source files.
108 	 */
109 	private static final String ENCODING = "UTF-8";
110 	/**
111 	 * The maximum number of files to parse in a single iteration.
112 	 */
113 	private static final int MAX_FILES_PER_PARSE_ITERATION = Integer.MAX_VALUE;
114 
115 	/**
116 	 * {@code true} to call {@link Collections#shuffle} on the list of input
117 	 * files before the first parse iteration.
118 	 */
119 	private static final boolean SHUFFLE_FILES_AT_START = false;
120 	/**
121 	 * {@code true} to call {@link Collections#shuffle} before each parse
122 	 * iteration <em>after</em> the first.
123 	 */
124 	private static final boolean SHUFFLE_FILES_AFTER_ITERATIONS = false;
125 	/**
126 	 * The instance of {@link Random} passed when calling
127 	 * {@link Collections#shuffle}.
128 	 */
129 	private static final Random RANDOM = new Random();
130 
131     /**
132      * {@code true} to use the Java grammar with expressions in the v4
133      * left-recursive syntax (JavaLR.g4). {@code false} to use the standard
134      * grammar (Java.g4). In either case, the grammar is renamed in the
135      * temporary directory to Java.g4 before compiling.
136      */
137     private static final boolean USE_LR_GRAMMAR = true;
138     /**
139      * {@code true} to specify the {@code -Xforce-atn} option when generating
140      * the grammar, forcing all decisions in {@code JavaParser} to be handled by
141      * {@link ParserATNSimulator#adaptivePredict}.
142      */
143     private static final boolean FORCE_ATN = false;
144     /**
145      * {@code true} to specify the {@code -atn} option when generating the
146      * grammar. This will cause ANTLR to export the ATN for each decision as a
147      * DOT (GraphViz) file.
148      */
149     private static final boolean EXPORT_ATN_GRAPHS = true;
150 	/**
151 	 * {@code true} to specify the {@code -XdbgST} option when generating the
152 	 * grammar.
153 	 */
154 	private static final boolean DEBUG_TEMPLATES = false;
155 	/**
156 	 * {@code true} to specify the {@code -XdbgSTWait} option when generating the
157 	 * grammar.
158 	 */
159 	private static final boolean DEBUG_TEMPLATES_WAIT = DEBUG_TEMPLATES;
160     /**
161      * {@code true} to delete temporary (generated and compiled) files when the
162      * test completes.
163      */
164     private static final boolean DELETE_TEMP_FILES = true;
165 	/**
166 	 * {@code true} to use a {@link ParserInterpreter} for parsing instead of
167 	 * generated parser.
168 	 */
169 	private static final boolean USE_PARSER_INTERPRETER = false;
170 
171 	/**
172 	 * {@code true} to call {@link System#gc} and then wait for 5 seconds at the
173 	 * end of the test to make it easier for a profiler to grab a heap dump at
174 	 * the end of the test run.
175 	 */
176     private static final boolean PAUSE_FOR_HEAP_DUMP = false;
177 
178     /**
179      * Parse each file with {@code JavaParser.compilationUnit}.
180      */
181     private static final boolean RUN_PARSER = true;
182     /**
183      * {@code true} to use {@link BailErrorStrategy}, {@code false} to use
184      * {@link DefaultErrorStrategy}.
185      */
186     private static final boolean BAIL_ON_ERROR = false;
187 	/**
188 	 * {@code true} to compute a checksum for verifying consistency across
189 	 * optimizations and multiple passes.
190 	 */
191 	private static final boolean COMPUTE_CHECKSUM = true;
192     /**
193      * This value is passed to {@link Parser#setBuildParseTree}.
194      */
195     private static final boolean BUILD_PARSE_TREES = false;
196     /**
197      * Use
198      * {@link ParseTreeWalker#DEFAULT}{@code .}{@link ParseTreeWalker#walk walk}
199      * with the {@code JavaParserBaseListener} to show parse tree walking
200      * overhead. If {@link #BUILD_PARSE_TREES} is {@code false}, the listener
201      * will instead be called during the parsing process via
202      * {@link Parser#addParseListener}.
203      */
204     private static final boolean BLANK_LISTENER = false;
205 
206 	/**
207 	 * Shows the number of {@link DFAState} and {@link ATNConfig} instances in
208 	 * the DFA cache at the end of each pass. If {@link #REUSE_LEXER_DFA} and/or
209 	 * {@link #REUSE_PARSER_DFA} are false, the corresponding instance numbers
210 	 * will only apply to one file (the last file if {@link #NUMBER_OF_THREADS}
211 	 * is 0, otherwise the last file which was parsed on the first thread).
212 	 */
213     private static final boolean SHOW_DFA_STATE_STATS = true;
214 	/**
215 	 * If {@code true}, the DFA state statistics report includes a breakdown of
216 	 * the number of DFA states contained in each decision (with rule names).
217 	 */
218 	private static final boolean DETAILED_DFA_STATE_STATS = true;
219 
220 	/**
221 	 * Specify the {@link PredictionMode} used by the
222 	 * {@link ParserATNSimulator}. If {@link #TWO_STAGE_PARSING} is
223 	 * {@code true}, this value only applies to the second stage, as the first
224 	 * stage will always use {@link PredictionMode#SLL}.
225 	 */
226 	private static final PredictionMode PREDICTION_MODE = PredictionMode.LL;
227 
228 	private static final boolean TWO_STAGE_PARSING = true;
229 
230     private static final boolean SHOW_CONFIG_STATS = false;
231 
232 	/**
233 	 * If {@code true}, detailed statistics for the number of DFA edges were
234 	 * taken while parsing each file, as well as the number of DFA edges which
235 	 * required on-the-fly computation.
236 	 */
237 	private static final boolean COMPUTE_TRANSITION_STATS = false;
238 	private static final boolean SHOW_TRANSITION_STATS_PER_FILE = false;
239 	/**
240 	 * If {@code true}, the transition statistics will be adjusted to a running
241 	 * total before reporting the final results.
242 	 */
243 	private static final boolean TRANSITION_RUNNING_AVERAGE = false;
244 	/**
245 	 * If {@code true}, transition statistics will be weighted according to the
246 	 * total number of transitions taken during the parsing of each file.
247 	 */
248 	private static final boolean TRANSITION_WEIGHTED_AVERAGE = false;
249 
250 	/**
251 	 * If {@code true}, after each pass a summary of the time required to parse
252 	 * each file will be printed.
253 	 */
254 	private static final boolean COMPUTE_TIMING_STATS = false;
255 	/**
256 	 * If {@code true}, the timing statistics for {@link #COMPUTE_TIMING_STATS}
257 	 * will be cumulative (i.e. the time reported for the <em>n</em>th file will
258 	 * be the total time required to parse the first <em>n</em> files).
259 	 */
260 	private static final boolean TIMING_CUMULATIVE = false;
261 	/**
262 	 * If {@code true}, the timing statistics will include the parser only. This
263 	 * flag allows for targeted measurements, and helps eliminate variance when
264 	 * {@link #PRELOAD_SOURCES} is {@code false}.
265 	 * <p/>
266 	 * This flag has no impact when {@link #RUN_PARSER} is {@code false}.
267 	 */
268 	private static final boolean TIME_PARSE_ONLY = false;
269 
270 	/**
271 	 * When {@code true}, messages will be printed to {@link System#err} when
272 	 * the first stage (SLL) parsing resulted in a syntax error. This option is
273 	 * ignored when {@link #TWO_STAGE_PARSING} is {@code false}.
274 	 */
275 	private static final boolean REPORT_SECOND_STAGE_RETRY = true;
276 	private static final boolean REPORT_SYNTAX_ERRORS = true;
277 	private static final boolean REPORT_AMBIGUITIES = false;
278 	private static final boolean REPORT_FULL_CONTEXT = false;
279 	private static final boolean REPORT_CONTEXT_SENSITIVITY = REPORT_FULL_CONTEXT;
280 
281     /**
282      * If {@code true}, a single {@code JavaLexer} will be used, and
283      * {@link Lexer#setInputStream} will be called to initialize it for each
284      * source file. Otherwise, a new instance will be created for each file.
285      */
286     private static final boolean REUSE_LEXER = false;
287 	/**
288 	 * If {@code true}, a single DFA will be used for lexing which is shared
289 	 * across all threads and files. Otherwise, each file will be lexed with its
290 	 * own DFA which is accomplished by creating one ATN instance per thread and
291 	 * clearing its DFA cache before lexing each file.
292 	 */
293 	private static final boolean REUSE_LEXER_DFA = true;
294     /**
295      * If {@code true}, a single {@code JavaParser} will be used, and
296      * {@link Parser#setInputStream} will be called to initialize it for each
297      * source file. Otherwise, a new instance will be created for each file.
298      */
299     private static final boolean REUSE_PARSER = false;
300 	/**
301 	 * If {@code true}, a single DFA will be used for parsing which is shared
302 	 * across all threads and files. Otherwise, each file will be parsed with
303 	 * its own DFA which is accomplished by creating one ATN instance per thread
304 	 * and clearing its DFA cache before parsing each file.
305 	 */
306 	private static final boolean REUSE_PARSER_DFA = true;
307     /**
308      * If {@code true}, the shared lexer and parser are reset after each pass.
309      * If {@code false}, all passes after the first will be fully "warmed up",
310      * which makes them faster and can compare them to the first warm-up pass,
311      * but it will not distinguish bytecode load/JIT time from warm-up time
312      * during the first pass.
313      */
314     private static final boolean CLEAR_DFA = false;
315     /**
316      * Total number of passes to make over the source.
317      */
318     private static final int PASSES = 4;
319 
320 	/**
321 	 * This option controls the granularity of multi-threaded parse operations.
322 	 * If {@code true}, the parsing operation will be parallelized across files;
323 	 * otherwise the parsing will be parallelized across multiple iterations.
324 	 */
325 	private static final boolean FILE_GRANULARITY = true;
326 
327 	/**
328 	 * Number of parser threads to use.
329 	 */
330 	private static final int NUMBER_OF_THREADS = 1;
331 
332     private static final Lexer[] sharedLexers = new Lexer[NUMBER_OF_THREADS];
333 
334     private static final Parser[] sharedParsers = new Parser[NUMBER_OF_THREADS];
335 
336     private static final ParseTreeListener[] sharedListeners = new ParseTreeListener[NUMBER_OF_THREADS];
337 
338 	private static final long[][] totalTransitionsPerFile;
339 	private static final long[][] computedTransitionsPerFile;
340 	static {
341 		if (COMPUTE_TRANSITION_STATS) {
342 			totalTransitionsPerFile = new long[PASSES][];
343 			computedTransitionsPerFile = new long[PASSES][];
344 		} else {
345 			totalTransitionsPerFile = null;
346 			computedTransitionsPerFile = null;
347 		}
348 	}
349 
350 	private static final long[][][] decisionInvocationsPerFile;
351 	private static final long[][][] fullContextFallbackPerFile;
352 	private static final long[][][] nonSllPerFile;
353 	private static final long[][][] totalTransitionsPerDecisionPerFile;
354 	private static final long[][][] computedTransitionsPerDecisionPerFile;
355 	private static final long[][][] fullContextTransitionsPerDecisionPerFile;
356 	static {
357 		if (COMPUTE_TRANSITION_STATS && DETAILED_DFA_STATE_STATS) {
358 			decisionInvocationsPerFile = new long[PASSES][][];
359 			fullContextFallbackPerFile = new long[PASSES][][];
360 			nonSllPerFile = new long[PASSES][][];
361 			totalTransitionsPerDecisionPerFile = new long[PASSES][][];
362 			computedTransitionsPerDecisionPerFile = new long[PASSES][][];
363 			fullContextTransitionsPerDecisionPerFile = new long[PASSES][][];
364 		} else {
365 			decisionInvocationsPerFile = null;
366 			fullContextFallbackPerFile = null;
367 			nonSllPerFile = null;
368 			totalTransitionsPerDecisionPerFile = null;
369 			computedTransitionsPerDecisionPerFile = null;
370 			fullContextTransitionsPerDecisionPerFile = null;
371 		}
372 	}
373 
374 	private static final long[][] timePerFile;
375 	private static final int[][] tokensPerFile;
376 	static {
377 		if (COMPUTE_TIMING_STATS) {
378 			timePerFile = new long[PASSES][];
379 			tokensPerFile = new int[PASSES][];
380 		} else {
381 			timePerFile = null;
382 			tokensPerFile = null;
383 		}
384 	}
385 
386     private final AtomicIntegerArray tokenCount = new AtomicIntegerArray(PASSES);
387 
388 	@Before
389 	@Override
testSetUp()390 	public void testSetUp() throws Exception {
391 		super.testSetUp();
392 	}
393 
394     @Test
395     @org.junit.Ignore
compileJdk()396     public void compileJdk() throws IOException, InterruptedException, ExecutionException {
397         String jdkSourceRoot = getSourceRoot("JDK");
398 		assertTrue("The JDK_SOURCE_ROOT environment variable must be set for performance testing.", jdkSourceRoot != null && !jdkSourceRoot.isEmpty());
399 
400         compileJavaParser(USE_LR_GRAMMAR);
401         final String lexerName    = USE_LR_GRAMMAR ? "JavaLRLexer"        : "JavaLexer";
402         final String parserName   = USE_LR_GRAMMAR ? "JavaLRParser"       : "JavaParser";
403         final String listenerName = USE_LR_GRAMMAR ? "JavaLRBaseListener" : "JavaBaseListener";
404         final String entryPoint = "compilationUnit";
405         final ParserFactory factory = getParserFactory(lexerName, parserName, listenerName, entryPoint);
406 
407 		if (!TOP_PACKAGE.isEmpty()) {
408             jdkSourceRoot = jdkSourceRoot + '/' + TOP_PACKAGE.replace('.', '/');
409         }
410 
411         File directory = new File(jdkSourceRoot);
412         assertTrue(directory.isDirectory());
413 
414 		FilenameFilter filesFilter = FilenameFilters.extension(".java", false);
415 		FilenameFilter directoriesFilter = FilenameFilters.ALL_FILES;
416 		final List<InputDescriptor> sources = loadSources(directory, filesFilter, directoriesFilter, RECURSIVE);
417 
418 		for (int i = 0; i < PASSES; i++) {
419 			if (COMPUTE_TRANSITION_STATS) {
420 				totalTransitionsPerFile[i] = new long[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)];
421 				computedTransitionsPerFile[i] = new long[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)];
422 
423 				if (DETAILED_DFA_STATE_STATS) {
424 					decisionInvocationsPerFile[i] = new long[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)][];
425 					fullContextFallbackPerFile[i] = new long[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)][];
426 					nonSllPerFile[i] = new long[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)][];
427 					totalTransitionsPerDecisionPerFile[i] = new long[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)][];
428 					computedTransitionsPerDecisionPerFile[i] = new long[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)][];
429 					fullContextTransitionsPerDecisionPerFile[i] = new long[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)][];
430 				}
431 			}
432 
433 			if (COMPUTE_TIMING_STATS) {
434 				timePerFile[i] = new long[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)];
435 				tokensPerFile[i] = new int[Math.min(sources.size(), MAX_FILES_PER_PARSE_ITERATION)];
436 			}
437 		}
438 
439 		System.out.format("Located %d source files.%n", sources.size());
440 		System.out.print(getOptionsDescription(TOP_PACKAGE));
441 
442 		ExecutorService executorService = Executors.newFixedThreadPool(FILE_GRANULARITY ? 1 : NUMBER_OF_THREADS, new NumberedThreadFactory());
443 
444 		List<Future<?>> passResults = new ArrayList<Future<?>>();
445 		passResults.add(executorService.submit(new Runnable() {
446 			@Override
447 			public void run() {
448 				try {
449 					parse1(0, factory, sources, SHUFFLE_FILES_AT_START);
450 				} catch (InterruptedException ex) {
451 					Logger.getLogger(TestPerformance.class.getName()).log(Level.SEVERE, null, ex);
452 				}
453 			}
454 		}));
455         for (int i = 0; i < PASSES - 1; i++) {
456             final int currentPass = i + 1;
457 			passResults.add(executorService.submit(new Runnable() {
458 				@Override
459 				public void run() {
460 					if (CLEAR_DFA) {
461 						int index = FILE_GRANULARITY ? 0 : ((NumberedThread)Thread.currentThread()).getThreadNumber();
462 						if (sharedLexers.length > 0 && sharedLexers[index] != null) {
463 							ATN atn = sharedLexers[index].getATN();
464 							for (int j = 0; j < sharedLexers[index].getInterpreter().decisionToDFA.length; j++) {
465 								sharedLexers[index].getInterpreter().decisionToDFA[j] = new DFA(atn.getDecisionState(j), j);
466 							}
467 						}
468 
469 						if (sharedParsers.length > 0 && sharedParsers[index] != null) {
470 							ATN atn = sharedParsers[index].getATN();
471 							for (int j = 0; j < sharedParsers[index].getInterpreter().decisionToDFA.length; j++) {
472 								sharedParsers[index].getInterpreter().decisionToDFA[j] = new DFA(atn.getDecisionState(j), j);
473 							}
474 						}
475 
476 						if (FILE_GRANULARITY) {
477 							Arrays.fill(sharedLexers, null);
478 							Arrays.fill(sharedParsers, null);
479 						}
480 					}
481 
482 					try {
483 						parse2(currentPass, factory, sources, SHUFFLE_FILES_AFTER_ITERATIONS);
484 					} catch (InterruptedException ex) {
485 						Logger.getLogger(TestPerformance.class.getName()).log(Level.SEVERE, null, ex);
486 					}
487 				}
488 			}));
489         }
490 
491 		for (Future<?> passResult : passResults) {
492 			passResult.get();
493 		}
494 
495 		executorService.shutdown();
496 		executorService.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
497 
498 		if (COMPUTE_TRANSITION_STATS && SHOW_TRANSITION_STATS_PER_FILE) {
499 			computeTransitionStatistics();
500 		}
501 
502 		if (COMPUTE_TIMING_STATS) {
503 			computeTimingStatistics();
504 		}
505 
506 		sources.clear();
507 		if (PAUSE_FOR_HEAP_DUMP) {
508 			System.gc();
509 			System.out.println("Pausing before application exit.");
510 			try {
511 				Thread.sleep(4000);
512 			} catch (InterruptedException ex) {
513 				Logger.getLogger(TestPerformance.class.getName()).log(Level.SEVERE, null, ex);
514 			}
515 		}
516     }
517 
518 	/**
519 	 * Compute and print ATN/DFA transition statistics.
520 	 */
computeTransitionStatistics()521 	private void computeTransitionStatistics() {
522 		if (TRANSITION_RUNNING_AVERAGE) {
523 			for (int i = 0; i < PASSES; i++) {
524 				long[] data = computedTransitionsPerFile[i];
525 				for (int j = 0; j < data.length - 1; j++) {
526 					data[j + 1] += data[j];
527 				}
528 
529 				data = totalTransitionsPerFile[i];
530 				for (int j = 0; j < data.length - 1; j++) {
531 					data[j + 1] += data[j];
532 				}
533 			}
534 		}
535 
536 		long[] sumNum = new long[totalTransitionsPerFile[0].length];
537 		long[] sumDen = new long[totalTransitionsPerFile[0].length];
538 		double[] sumNormalized = new double[totalTransitionsPerFile[0].length];
539 		for (int i = 0; i < PASSES; i++) {
540 			long[] num = computedTransitionsPerFile[i];
541 			long[] den = totalTransitionsPerFile[i];
542 			for (int j = 0; j < den.length; j++) {
543 				sumNum[j] += num[j];
544 				sumDen[j] += den[j];
545 				if (den[j] > 0) {
546 					sumNormalized[j] += (double)num[j] / (double)den[j];
547 				}
548 			}
549 		}
550 
551 		double[] weightedAverage = new double[totalTransitionsPerFile[0].length];
552 		double[] average = new double[totalTransitionsPerFile[0].length];
553 		for (int i = 0; i < average.length; i++) {
554 			if (sumDen[i] > 0) {
555 				weightedAverage[i] = (double)sumNum[i] / (double)sumDen[i];
556 			}
557 			else {
558 				weightedAverage[i] = 0;
559 			}
560 
561 			average[i] = sumNormalized[i] / PASSES;
562 		}
563 
564 		double[] low95 = new double[totalTransitionsPerFile[0].length];
565 		double[] high95 = new double[totalTransitionsPerFile[0].length];
566 		double[] low67 = new double[totalTransitionsPerFile[0].length];
567 		double[] high67 = new double[totalTransitionsPerFile[0].length];
568 		double[] stddev = new double[totalTransitionsPerFile[0].length];
569 		for (int i = 0; i < stddev.length; i++) {
570 			double[] points = new double[PASSES];
571 			for (int j = 0; j < PASSES; j++) {
572 				long totalTransitions = totalTransitionsPerFile[j][i];
573 				if (totalTransitions > 0) {
574 					points[j] = ((double)computedTransitionsPerFile[j][i] / (double)totalTransitionsPerFile[j][i]);
575 				}
576 				else {
577 					points[j] = 0;
578 				}
579 			}
580 
581 			Arrays.sort(points);
582 
583 			final double averageValue = TRANSITION_WEIGHTED_AVERAGE ? weightedAverage[i] : average[i];
584 			double value = 0;
585 			for (int j = 0; j < PASSES; j++) {
586 				double diff = points[j] - averageValue;
587 				value += diff * diff;
588 			}
589 
590 			int ignoreCount95 = (int)Math.round(PASSES * (1 - 0.95) / 2.0);
591 			int ignoreCount67 = (int)Math.round(PASSES * (1 - 0.667) / 2.0);
592 			low95[i] = points[ignoreCount95];
593 			high95[i] = points[points.length - 1 - ignoreCount95];
594 			low67[i] = points[ignoreCount67];
595 			high67[i] = points[points.length - 1 - ignoreCount67];
596 			stddev[i] = Math.sqrt(value / PASSES);
597 		}
598 
599 		System.out.format("File\tAverage\tStd. Dev.\t95%% Low\t95%% High\t66.7%% Low\t66.7%% High%n");
600 		for (int i = 0; i < stddev.length; i++) {
601 			final double averageValue = TRANSITION_WEIGHTED_AVERAGE ? weightedAverage[i] : average[i];
602 			System.out.format("%d\t%e\t%e\t%e\t%e\t%e\t%e%n", i + 1, averageValue, stddev[i], averageValue - low95[i], high95[i] - averageValue, averageValue - low67[i], high67[i] - averageValue);
603 		}
604 	}
605 
606 	/**
607 	 * Compute and print timing statistics.
608 	 */
computeTimingStatistics()609 	private void computeTimingStatistics() {
610 		if (TIMING_CUMULATIVE) {
611 			for (int i = 0; i < PASSES; i++) {
612 				long[] data = timePerFile[i];
613 				for (int j = 0; j < data.length - 1; j++) {
614 					data[j + 1] += data[j];
615 				}
616 
617 				int[] data2 = tokensPerFile[i];
618 				for (int j = 0; j < data2.length - 1; j++) {
619 					data2[j + 1] += data2[j];
620 				}
621 			}
622 		}
623 
624 		final int fileCount = timePerFile[0].length;
625 		double[] sum = new double[fileCount];
626 		for (int i = 0; i < PASSES; i++) {
627 			long[] data = timePerFile[i];
628 			int[] tokenData = tokensPerFile[i];
629 			for (int j = 0; j < data.length; j++) {
630 				sum[j] += (double)data[j] / (double)tokenData[j];
631 			}
632 		}
633 
634 		double[] average = new double[fileCount];
635 		for (int i = 0; i < average.length; i++) {
636 			average[i] = sum[i] / PASSES;
637 		}
638 
639 		double[] low95 = new double[fileCount];
640 		double[] high95 = new double[fileCount];
641 		double[] low67 = new double[fileCount];
642 		double[] high67 = new double[fileCount];
643 		double[] stddev = new double[fileCount];
644 		for (int i = 0; i < stddev.length; i++) {
645 			double[] points = new double[PASSES];
646 			for (int j = 0; j < PASSES; j++) {
647 				points[j] = (double)timePerFile[j][i] / (double)tokensPerFile[j][i];
648 			}
649 
650 			Arrays.sort(points);
651 
652 			final double averageValue = average[i];
653 			double value = 0;
654 			for (int j = 0; j < PASSES; j++) {
655 				double diff = points[j] - averageValue;
656 				value += diff * diff;
657 			}
658 
659 			int ignoreCount95 = (int)Math.round(PASSES * (1 - 0.95) / 2.0);
660 			int ignoreCount67 = (int)Math.round(PASSES * (1 - 0.667) / 2.0);
661 			low95[i] = points[ignoreCount95];
662 			high95[i] = points[points.length - 1 - ignoreCount95];
663 			low67[i] = points[ignoreCount67];
664 			high67[i] = points[points.length - 1 - ignoreCount67];
665 			stddev[i] = Math.sqrt(value / PASSES);
666 		}
667 
668 		System.out.format("File\tAverage\tStd. Dev.\t95%% Low\t95%% High\t66.7%% Low\t66.7%% High%n");
669 		for (int i = 0; i < stddev.length; i++) {
670 			final double averageValue = average[i];
671 			System.out.format("%d\t%e\t%e\t%e\t%e\t%e\t%e%n", i + 1, averageValue, stddev[i], averageValue - low95[i], high95[i] - averageValue, averageValue - low67[i], high67[i] - averageValue);
672 		}
673 	}
674 
getSourceRoot(String prefix)675 	private String getSourceRoot(String prefix) {
676 		String sourceRoot = System.getenv(prefix+"_SOURCE_ROOT");
677 		if (sourceRoot == null) {
678 			sourceRoot = System.getProperty(prefix+"_SOURCE_ROOT");
679 		}
680 
681 		return sourceRoot;
682 	}
683 
684     @Override
eraseTempDir()685     public void eraseTempDir() {
686         if (DELETE_TEMP_FILES) {
687             super.eraseTempDir();
688         }
689     }
690 
getOptionsDescription(String topPackage)691     public static String getOptionsDescription(String topPackage) {
692         StringBuilder builder = new StringBuilder();
693         builder.append("Input=");
694         if (topPackage.isEmpty()) {
695             builder.append("*");
696         }
697         else {
698             builder.append(topPackage).append(".*");
699         }
700 
701         builder.append(", Grammar=").append(USE_LR_GRAMMAR ? "LR" : "Standard");
702         builder.append(", ForceAtn=").append(FORCE_ATN);
703 
704         builder.append(newline);
705 
706         builder.append("Op=Lex").append(RUN_PARSER ? "+Parse" : " only");
707         builder.append(", Strategy=").append(BAIL_ON_ERROR ? BailErrorStrategy.class.getSimpleName() : DefaultErrorStrategy.class.getSimpleName());
708         builder.append(", BuildParseTree=").append(BUILD_PARSE_TREES);
709         builder.append(", WalkBlankListener=").append(BLANK_LISTENER);
710 
711         builder.append(newline);
712 
713         builder.append("Lexer=").append(REUSE_LEXER ? "setInputStream" : "newInstance");
714         builder.append(", Parser=").append(REUSE_PARSER ? "setInputStream" : "newInstance");
715         builder.append(", AfterPass=").append(CLEAR_DFA ? "newInstance" : "setInputStream");
716 
717         builder.append(newline);
718 
719         return builder.toString();
720     }
721 
722     /**
723      *  This method is separate from {@link #parse2} so the first pass can be distinguished when analyzing
724      *  profiler results.
725      */
parse1(int currentPass, ParserFactory factory, Collection<InputDescriptor> sources, boolean shuffleSources)726     protected void parse1(int currentPass, ParserFactory factory, Collection<InputDescriptor> sources, boolean shuffleSources) throws InterruptedException {
727 		if (FILE_GRANULARITY) {
728 			System.gc();
729 		}
730 
731         parseSources(currentPass, factory, sources, shuffleSources);
732     }
733 
734     /**
735      *  This method is separate from {@link #parse1} so the first pass can be distinguished when analyzing
736      *  profiler results.
737      */
parse2(int currentPass, ParserFactory factory, Collection<InputDescriptor> sources, boolean shuffleSources)738     protected void parse2(int currentPass, ParserFactory factory, Collection<InputDescriptor> sources, boolean shuffleSources) throws InterruptedException {
739 		if (FILE_GRANULARITY) {
740 			System.gc();
741 		}
742 
743         parseSources(currentPass, factory, sources, shuffleSources);
744     }
745 
loadSources(File directory, FilenameFilter filesFilter, FilenameFilter directoriesFilter, boolean recursive)746     protected List<InputDescriptor> loadSources(File directory, FilenameFilter filesFilter, FilenameFilter directoriesFilter, boolean recursive) {
747         List<InputDescriptor> result = new ArrayList<InputDescriptor>();
748         loadSources(directory, filesFilter, directoriesFilter, recursive, result);
749         return result;
750     }
751 
loadSources(File directory, FilenameFilter filesFilter, FilenameFilter directoriesFilter, boolean recursive, Collection<InputDescriptor> result)752     protected void loadSources(File directory, FilenameFilter filesFilter, FilenameFilter directoriesFilter, boolean recursive, Collection<InputDescriptor> result) {
753         assert directory.isDirectory();
754 
755         File[] sources = directory.listFiles(filesFilter);
756         for (File file : sources) {
757 			if (!file.isFile()) {
758 				continue;
759 			}
760 
761 			result.add(new InputDescriptor(file.getAbsolutePath()));
762         }
763 
764         if (recursive) {
765             File[] children = directory.listFiles(directoriesFilter);
766             for (File child : children) {
767                 if (child.isDirectory()) {
768                     loadSources(child, filesFilter, directoriesFilter, true, result);
769                 }
770             }
771         }
772     }
773 
774     int configOutputSize = 0;
775 
parseSources(final int currentPass, final ParserFactory factory, Collection<InputDescriptor> sources, boolean shuffleSources)776 	protected void parseSources(final int currentPass, final ParserFactory factory, Collection<InputDescriptor> sources, boolean shuffleSources) throws InterruptedException {
777 		if (shuffleSources) {
778 			List<InputDescriptor> sourcesList = new ArrayList<InputDescriptor>(sources);
779 			synchronized (RANDOM) {
780 				Collections.shuffle(sourcesList, RANDOM);
781 			}
782 
783 			sources = sourcesList;
784 		}
785 
786 		long startTime = System.nanoTime();
787         tokenCount.set(currentPass, 0);
788         int inputSize = 0;
789 		int inputCount = 0;
790 
791 		Collection<Future<FileParseResult>> results = new ArrayList<Future<FileParseResult>>();
792 		ExecutorService executorService;
793 		if (FILE_GRANULARITY) {
794 			executorService = Executors.newFixedThreadPool(FILE_GRANULARITY ? NUMBER_OF_THREADS : 1, new NumberedThreadFactory());
795 		} else {
796 			executorService = Executors.newSingleThreadExecutor(new FixedThreadNumberFactory(((NumberedThread)Thread.currentThread()).getThreadNumber()));
797 		}
798 
799 		for (InputDescriptor inputDescriptor : sources) {
800 			if (inputCount >= MAX_FILES_PER_PARSE_ITERATION) {
801 				break;
802 			}
803 
804 			final CharStream input = inputDescriptor.getInputStream();
805             input.seek(0);
806             inputSize += input.size();
807 			inputCount++;
808 			Future<FileParseResult> futureChecksum = executorService.submit(new Callable<FileParseResult>() {
809 				@Override
810 				public FileParseResult call() {
811 					// this incurred a great deal of overhead and was causing significant variations in performance results.
812 					//System.out.format("Parsing file %s\n", input.getSourceName());
813 					try {
814 						return factory.parseFile(input, currentPass, ((NumberedThread)Thread.currentThread()).getThreadNumber());
815 					} catch (IllegalStateException ex) {
816 						ex.printStackTrace(System.err);
817 					} catch (Throwable t) {
818 						t.printStackTrace(System.err);
819 					}
820 
821 					return null;
822 				}
823 			});
824 
825 			results.add(futureChecksum);
826         }
827 
828 		MurmurHashChecksum checksum = new MurmurHashChecksum();
829 		int currentIndex = -1;
830 		for (Future<FileParseResult> future : results) {
831 			currentIndex++;
832 			int fileChecksum = 0;
833 			try {
834 				FileParseResult fileResult = future.get();
835 				if (COMPUTE_TRANSITION_STATS) {
836 					totalTransitionsPerFile[currentPass][currentIndex] = sum(fileResult.parserTotalTransitions);
837 					computedTransitionsPerFile[currentPass][currentIndex] = sum(fileResult.parserComputedTransitions);
838 
839 					if (DETAILED_DFA_STATE_STATS) {
840 						decisionInvocationsPerFile[currentPass][currentIndex] = fileResult.decisionInvocations;
841 						fullContextFallbackPerFile[currentPass][currentIndex] = fileResult.fullContextFallback;
842 						nonSllPerFile[currentPass][currentIndex] = fileResult.nonSll;
843 						totalTransitionsPerDecisionPerFile[currentPass][currentIndex] = fileResult.parserTotalTransitions;
844 						computedTransitionsPerDecisionPerFile[currentPass][currentIndex] = fileResult.parserComputedTransitions;
845 						fullContextTransitionsPerDecisionPerFile[currentPass][currentIndex] = fileResult.parserFullContextTransitions;
846 					}
847 				}
848 
849 				if (COMPUTE_TIMING_STATS) {
850 					timePerFile[currentPass][currentIndex] = fileResult.endTime - fileResult.startTime;
851 					tokensPerFile[currentPass][currentIndex] = fileResult.tokenCount;
852 				}
853 
854 				fileChecksum = fileResult.checksum;
855 			} catch (ExecutionException ex) {
856 				Logger.getLogger(TestPerformance.class.getName()).log(Level.SEVERE, null, ex);
857 			}
858 
859 			if (COMPUTE_CHECKSUM) {
860 				updateChecksum(checksum, fileChecksum);
861 			}
862 		}
863 
864 		executorService.shutdown();
865 		executorService.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS);
866 
867         System.out.format("%d. Total parse time for %d files (%d KB, %d tokens%s): %.0fms%n",
868 						  currentPass + 1,
869                           inputCount,
870                           inputSize / 1024,
871                           tokenCount.get(currentPass),
872 						  COMPUTE_CHECKSUM ? String.format(", checksum 0x%8X", checksum.getValue()) : "",
873                           (double)(System.nanoTime() - startTime) / 1000000.0);
874 
875 		if (sharedLexers.length > 0) {
876 			int index = FILE_GRANULARITY ? 0 : ((NumberedThread)Thread.currentThread()).getThreadNumber();
877 			Lexer lexer = sharedLexers[index];
878 			final LexerATNSimulator lexerInterpreter = lexer.getInterpreter();
879 			final DFA[] modeToDFA = lexerInterpreter.decisionToDFA;
880 			if (SHOW_DFA_STATE_STATS) {
881 				int states = 0;
882 				int configs = 0;
883 				Set<ATNConfig> uniqueConfigs = new HashSet<ATNConfig>();
884 
885 				for (int i = 0; i < modeToDFA.length; i++) {
886 					DFA dfa = modeToDFA[i];
887 					if (dfa == null) {
888 						continue;
889 					}
890 
891 					states += dfa.states.size();
892 					for (DFAState state : dfa.states.values()) {
893 						configs += state.configs.size();
894 						uniqueConfigs.addAll(state.configs);
895 					}
896 				}
897 
898 				System.out.format("There are %d lexer DFAState instances, %d configs (%d unique).%n", states, configs, uniqueConfigs.size());
899 
900 				if (DETAILED_DFA_STATE_STATS) {
901 					System.out.format("\tMode\tStates\tConfigs\tMode%n");
902 					for (int i = 0; i < modeToDFA.length; i++) {
903 						DFA dfa = modeToDFA[i];
904 						if (dfa == null || dfa.states.isEmpty()) {
905 							continue;
906 						}
907 
908 						int modeConfigs = 0;
909 						for (DFAState state : dfa.states.values()) {
910 							modeConfigs += state.configs.size();
911 						}
912 
913 						String modeName = lexer.getModeNames()[i];
914 						System.out.format("\t%d\t%d\t%d\t%s%n", dfa.decision, dfa.states.size(), modeConfigs, modeName);
915 					}
916 				}
917 			}
918 		}
919 
920 		if (RUN_PARSER && sharedParsers.length > 0) {
921 			int index = FILE_GRANULARITY ? 0 : ((NumberedThread)Thread.currentThread()).getThreadNumber();
922 			Parser parser = sharedParsers[index];
923             // make sure the individual DFAState objects actually have unique ATNConfig arrays
924             final ParserATNSimulator interpreter = parser.getInterpreter();
925             final DFA[] decisionToDFA = interpreter.decisionToDFA;
926 
927             if (SHOW_DFA_STATE_STATS) {
928                 int states = 0;
929 				int configs = 0;
930 				Set<ATNConfig> uniqueConfigs = new HashSet<ATNConfig>();
931 
932                 for (int i = 0; i < decisionToDFA.length; i++) {
933                     DFA dfa = decisionToDFA[i];
934                     if (dfa == null) {
935                         continue;
936                     }
937 
938                     states += dfa.states.size();
939 					for (DFAState state : dfa.states.values()) {
940 						configs += state.configs.size();
941 						uniqueConfigs.addAll(state.configs);
942 					}
943                 }
944 
945                 System.out.format("There are %d parser DFAState instances, %d configs (%d unique).%n", states, configs, uniqueConfigs.size());
946 
947 				if (DETAILED_DFA_STATE_STATS) {
948 					if (COMPUTE_TRANSITION_STATS) {
949 						System.out.format("\tDecision\tStates\tConfigs\tPredict (ALL)\tPredict (LL)\tNon-SLL\tTransitions\tTransitions (ATN)\tTransitions (LL)\tLA (SLL)\tLA (LL)\tRule%n");
950 					}
951 					else {
952 						System.out.format("\tDecision\tStates\tConfigs\tRule%n");
953 					}
954 
955 					for (int i = 0; i < decisionToDFA.length; i++) {
956 						DFA dfa = decisionToDFA[i];
957 						if (dfa == null || dfa.states.isEmpty()) {
958 							continue;
959 						}
960 
961 						int decisionConfigs = 0;
962 						for (DFAState state : dfa.states.values()) {
963 							decisionConfigs += state.configs.size();
964 						}
965 
966 						String ruleName = parser.getRuleNames()[parser.getATN().decisionToState.get(dfa.decision).ruleIndex];
967 
968 						long calls = 0;
969 						long fullContextCalls = 0;
970 						long nonSllCalls = 0;
971 						long transitions = 0;
972 						long computedTransitions = 0;
973 						long fullContextTransitions = 0;
974 						double lookahead = 0;
975 						double fullContextLookahead = 0;
976 						String formatString;
977 						if (COMPUTE_TRANSITION_STATS) {
978 							for (long[] data : decisionInvocationsPerFile[currentPass]) {
979 								calls += data[i];
980 							}
981 
982 							for (long[] data : fullContextFallbackPerFile[currentPass]) {
983 								fullContextCalls += data[i];
984 							}
985 
986 							for (long[] data : nonSllPerFile[currentPass]) {
987 								nonSllCalls += data[i];
988 							}
989 
990 							for (long[] data : totalTransitionsPerDecisionPerFile[currentPass]) {
991 								transitions += data[i];
992 							}
993 
994 							for (long[] data : computedTransitionsPerDecisionPerFile[currentPass]) {
995 								computedTransitions += data[i];
996 							}
997 
998 							for (long[] data : fullContextTransitionsPerDecisionPerFile[currentPass]) {
999 								fullContextTransitions += data[i];
1000 							}
1001 
1002 							if (calls > 0) {
1003 								lookahead = (double)(transitions - fullContextTransitions) / (double)calls;
1004 							}
1005 
1006 							if (fullContextCalls > 0) {
1007 								fullContextLookahead = (double)fullContextTransitions / (double)fullContextCalls;
1008 							}
1009 
1010 							formatString = "\t%1$d\t%2$d\t%3$d\t%4$d\t%5$d\t%6$d\t%7$d\t%8$d\t%9$d\t%10$f\t%11$f\t%12$s%n";
1011 						}
1012 						else {
1013 							calls = 0;
1014 							formatString = "\t%1$d\t%2$d\t%3$d\t%12$s%n";
1015 						}
1016 
1017 						System.out.format(formatString, dfa.decision, dfa.states.size(), decisionConfigs, calls, fullContextCalls, nonSllCalls, transitions, computedTransitions, fullContextTransitions, lookahead, fullContextLookahead, ruleName);
1018 					}
1019 				}
1020             }
1021 
1022             int localDfaCount = 0;
1023             int globalDfaCount = 0;
1024             int localConfigCount = 0;
1025             int globalConfigCount = 0;
1026             int[] contextsInDFAState = new int[0];
1027 
1028             for (int i = 0; i < decisionToDFA.length; i++) {
1029                 DFA dfa = decisionToDFA[i];
1030                 if (dfa == null) {
1031                     continue;
1032                 }
1033 
1034                 if (SHOW_CONFIG_STATS) {
1035                     for (DFAState state : dfa.states.keySet()) {
1036                         if (state.configs.size() >= contextsInDFAState.length) {
1037                             contextsInDFAState = Arrays.copyOf(contextsInDFAState, state.configs.size() + 1);
1038                         }
1039 
1040                         if (state.isAcceptState) {
1041                             boolean hasGlobal = false;
1042                             for (ATNConfig config : state.configs) {
1043                                 if (config.reachesIntoOuterContext > 0) {
1044                                     globalConfigCount++;
1045                                     hasGlobal = true;
1046                                 } else {
1047                                     localConfigCount++;
1048                                 }
1049                             }
1050 
1051                             if (hasGlobal) {
1052                                 globalDfaCount++;
1053                             } else {
1054                                 localDfaCount++;
1055                             }
1056                         }
1057 
1058                         contextsInDFAState[state.configs.size()]++;
1059                     }
1060                 }
1061             }
1062 
1063             if (SHOW_CONFIG_STATS && currentPass == 0) {
1064                 System.out.format("  DFA accept states: %d total, %d with only local context, %d with a global context%n", localDfaCount + globalDfaCount, localDfaCount, globalDfaCount);
1065                 System.out.format("  Config stats: %d total, %d local, %d global%n", localConfigCount + globalConfigCount, localConfigCount, globalConfigCount);
1066                 if (SHOW_DFA_STATE_STATS) {
1067                     for (int i = 0; i < contextsInDFAState.length; i++) {
1068                         if (contextsInDFAState[i] != 0) {
1069                             System.out.format("  %d configs = %d%n", i, contextsInDFAState[i]);
1070                         }
1071                     }
1072                 }
1073             }
1074         }
1075 
1076 		if (COMPUTE_TIMING_STATS) {
1077 			System.out.format("File\tTokens\tTime%n");
1078 			for (int i = 0; i< timePerFile[currentPass].length; i++) {
1079 				System.out.format("%d\t%d\t%d%n", i + 1, tokensPerFile[currentPass][i], timePerFile[currentPass][i]);
1080 			}
1081 		}
1082     }
1083 
sum(long[] array)1084 	private static long sum(long[] array) {
1085 		long result = 0;
1086 		for (int i = 0; i < array.length; i++) {
1087 			result += array[i];
1088 		}
1089 
1090 		return result;
1091 	}
1092 
compileJavaParser(boolean leftRecursive)1093     protected void compileJavaParser(boolean leftRecursive) throws IOException {
1094         String grammarFileName = leftRecursive ? "JavaLR.g4"    : "Java.g4";
1095         String parserName      = leftRecursive ? "JavaLRParser" : "JavaParser";
1096         String lexerName       = leftRecursive ? "JavaLRLexer"  : "JavaLexer";
1097         String body = load(grammarFileName, null);
1098         List<String> extraOptions = new ArrayList<String>();
1099 		extraOptions.add("-Werror");
1100         if (FORCE_ATN) {
1101             extraOptions.add("-Xforce-atn");
1102         }
1103         if (EXPORT_ATN_GRAPHS) {
1104             extraOptions.add("-atn");
1105         }
1106 		if (DEBUG_TEMPLATES) {
1107 			extraOptions.add("-XdbgST");
1108 			if (DEBUG_TEMPLATES_WAIT) {
1109 				extraOptions.add("-XdbgSTWait");
1110 			}
1111 		}
1112 		extraOptions.add("-visitor");
1113         String[] extraOptionsArray = extraOptions.toArray(new String[extraOptions.size()]);
1114         boolean success = rawGenerateAndBuildRecognizer(grammarFileName, body, parserName, lexerName, true, extraOptionsArray);
1115         assertTrue(success);
1116     }
1117 
updateChecksum(MurmurHashChecksum checksum, int value)1118 	private static void updateChecksum(MurmurHashChecksum checksum, int value) {
1119 		checksum.update(value);
1120 	}
1121 
updateChecksum(MurmurHashChecksum checksum, Token token)1122 	private static void updateChecksum(MurmurHashChecksum checksum, Token token) {
1123 		if (token == null) {
1124 			checksum.update(0);
1125 			return;
1126 		}
1127 
1128 		updateChecksum(checksum, token.getStartIndex());
1129 		updateChecksum(checksum, token.getStopIndex());
1130 		updateChecksum(checksum, token.getLine());
1131 		updateChecksum(checksum, token.getCharPositionInLine());
1132 		updateChecksum(checksum, token.getType());
1133 		updateChecksum(checksum, token.getChannel());
1134 	}
1135 
getParserFactory(String lexerName, String parserName, String listenerName, final String entryPoint)1136     protected ParserFactory getParserFactory(String lexerName, String parserName, String listenerName, final String entryPoint) {
1137         try {
1138             ClassLoader loader = new URLClassLoader(new URL[] { new File(tmpdir).toURI().toURL() }, ClassLoader.getSystemClassLoader());
1139             final Class<? extends Lexer> lexerClass = loader.loadClass(lexerName).asSubclass(Lexer.class);
1140             final Class<? extends Parser> parserClass = loader.loadClass(parserName).asSubclass(Parser.class);
1141             final Class<? extends ParseTreeListener> listenerClass = loader.loadClass(listenerName).asSubclass(ParseTreeListener.class);
1142 
1143             final Constructor<? extends Lexer> lexerCtor = lexerClass.getConstructor(CharStream.class);
1144             final Constructor<? extends Parser> parserCtor = parserClass.getConstructor(TokenStream.class);
1145 
1146             // construct initial instances of the lexer and parser to deserialize their ATNs
1147             TokenSource tokenSource = lexerCtor.newInstance(new ANTLRInputStream(""));
1148             parserCtor.newInstance(new CommonTokenStream(tokenSource));
1149 
1150             return new ParserFactory() {
1151 
1152 				@Override
1153                 public FileParseResult parseFile(CharStream input, int currentPass, int thread) {
1154 					final MurmurHashChecksum checksum = new MurmurHashChecksum();
1155 
1156 					final long startTime = System.nanoTime();
1157 					assert thread >= 0 && thread < NUMBER_OF_THREADS;
1158 
1159                     try {
1160 						ParseTreeListener listener = sharedListeners[thread];
1161 						if (listener == null) {
1162 							listener = listenerClass.newInstance();
1163 							sharedListeners[thread] = listener;
1164 						}
1165 
1166 						Lexer lexer = sharedLexers[thread];
1167                         if (REUSE_LEXER && lexer != null) {
1168                             lexer.setInputStream(input);
1169                         } else {
1170 							Lexer previousLexer = lexer;
1171                             lexer = lexerCtor.newInstance(input);
1172 							DFA[] decisionToDFA = (FILE_GRANULARITY || previousLexer == null ? lexer : previousLexer).getInterpreter().decisionToDFA;
1173 							if (!REUSE_LEXER_DFA || (!FILE_GRANULARITY && previousLexer == null)) {
1174 								decisionToDFA = new DFA[decisionToDFA.length];
1175 							}
1176 
1177 							if (COMPUTE_TRANSITION_STATS) {
1178 								lexer.setInterpreter(new StatisticsLexerATNSimulator(lexer, lexer.getATN(), decisionToDFA, lexer.getInterpreter().getSharedContextCache()));
1179 							} else if (!REUSE_LEXER_DFA) {
1180 								lexer.setInterpreter(new LexerATNSimulator(lexer, lexer.getATN(), decisionToDFA, lexer.getInterpreter().getSharedContextCache()));
1181 							}
1182 
1183 							sharedLexers[thread] = lexer;
1184                         }
1185 
1186 						lexer.removeErrorListeners();
1187 						lexer.addErrorListener(DescriptiveErrorListener.INSTANCE);
1188 
1189 						if (lexer.getInterpreter().decisionToDFA[0] == null) {
1190 							ATN atn = lexer.getATN();
1191 							for (int i = 0; i < lexer.getInterpreter().decisionToDFA.length; i++) {
1192 								lexer.getInterpreter().decisionToDFA[i] = new DFA(atn.getDecisionState(i), i);
1193 							}
1194 						}
1195 
1196                         CommonTokenStream tokens = new CommonTokenStream(lexer);
1197                         tokens.fill();
1198                         tokenCount.addAndGet(currentPass, tokens.size());
1199 
1200 						if (COMPUTE_CHECKSUM) {
1201 							for (Token token : tokens.getTokens()) {
1202 								updateChecksum(checksum, token);
1203 							}
1204 						}
1205 
1206                         if (!RUN_PARSER) {
1207                             return new FileParseResult(input.getSourceName(), (int)checksum.getValue(), null, tokens.size(), startTime, lexer, null);
1208                         }
1209 
1210 						final long parseStartTime = System.nanoTime();
1211 						Parser parser = sharedParsers[thread];
1212                         if (REUSE_PARSER && parser != null) {
1213                             parser.setInputStream(tokens);
1214                         } else {
1215 							Parser previousParser = parser;
1216 
1217 							if (USE_PARSER_INTERPRETER) {
1218 								Parser referenceParser = parserCtor.newInstance(tokens);
1219 								parser = new ParserInterpreter(referenceParser.getGrammarFileName(), referenceParser.getVocabulary(), Arrays.asList(referenceParser.getRuleNames()), referenceParser.getATN(), tokens);
1220 							}
1221 							else {
1222 								parser = parserCtor.newInstance(tokens);
1223 							}
1224 
1225 							DFA[] decisionToDFA = (FILE_GRANULARITY || previousParser == null ? parser : previousParser).getInterpreter().decisionToDFA;
1226 							if (!REUSE_PARSER_DFA || (!FILE_GRANULARITY && previousParser == null)) {
1227 								decisionToDFA = new DFA[decisionToDFA.length];
1228 							}
1229 
1230 							if (COMPUTE_TRANSITION_STATS) {
1231 								parser.setInterpreter(new StatisticsParserATNSimulator(parser, parser.getATN(), decisionToDFA, parser.getInterpreter().getSharedContextCache()));
1232 							} else if (!REUSE_PARSER_DFA) {
1233 								parser.setInterpreter(new ParserATNSimulator(parser, parser.getATN(), decisionToDFA, parser.getInterpreter().getSharedContextCache()));
1234 							}
1235 
1236 							sharedParsers[thread] = parser;
1237                         }
1238 
1239 						parser.removeParseListeners();
1240 						parser.removeErrorListeners();
1241 						if (!TWO_STAGE_PARSING) {
1242 							parser.addErrorListener(DescriptiveErrorListener.INSTANCE);
1243 							parser.addErrorListener(new SummarizingDiagnosticErrorListener());
1244 						}
1245 
1246 						if (parser.getInterpreter().decisionToDFA[0] == null) {
1247 							ATN atn = parser.getATN();
1248 							for (int i = 0; i < parser.getInterpreter().decisionToDFA.length; i++) {
1249 								parser.getInterpreter().decisionToDFA[i] = new DFA(atn.getDecisionState(i), i);
1250 							}
1251 						}
1252 
1253 						parser.getInterpreter().setPredictionMode(TWO_STAGE_PARSING ? PredictionMode.SLL : PREDICTION_MODE);
1254 						parser.setBuildParseTree(BUILD_PARSE_TREES);
1255 						if (!BUILD_PARSE_TREES && BLANK_LISTENER) {
1256 							parser.addParseListener(listener);
1257 						}
1258 						if (BAIL_ON_ERROR || TWO_STAGE_PARSING) {
1259 							parser.setErrorHandler(new BailErrorStrategy());
1260 						}
1261 
1262                         Method parseMethod = parserClass.getMethod(entryPoint);
1263                         Object parseResult;
1264 
1265 						try {
1266 							if (COMPUTE_CHECKSUM && !BUILD_PARSE_TREES) {
1267 								parser.addParseListener(new ChecksumParseTreeListener(checksum));
1268 							}
1269 
1270 							if (USE_PARSER_INTERPRETER) {
1271 								ParserInterpreter parserInterpreter = (ParserInterpreter)parser;
1272 								parseResult = parserInterpreter.parse(Collections.lastIndexOfSubList(Arrays.asList(parser.getRuleNames()), Collections.singletonList(entryPoint)));
1273 							}
1274 							else {
1275 								parseResult = parseMethod.invoke(parser);
1276 							}
1277 						} catch (InvocationTargetException ex) {
1278 							if (!TWO_STAGE_PARSING) {
1279 								throw ex;
1280 							}
1281 
1282 							String sourceName = tokens.getSourceName();
1283 							sourceName = sourceName != null && !sourceName.isEmpty() ? sourceName+": " : "";
1284 							if (REPORT_SECOND_STAGE_RETRY) {
1285 								System.err.println(sourceName+"Forced to retry with full context.");
1286 							}
1287 
1288 							if (!(ex.getCause() instanceof ParseCancellationException)) {
1289 								throw ex;
1290 							}
1291 
1292 							tokens.seek(0);
1293 							if (REUSE_PARSER && parser != null) {
1294 								parser.setInputStream(tokens);
1295 							} else {
1296 								Parser previousParser = parser;
1297 
1298 								if (USE_PARSER_INTERPRETER) {
1299 									Parser referenceParser = parserCtor.newInstance(tokens);
1300 									parser = new ParserInterpreter(referenceParser.getGrammarFileName(), referenceParser.getVocabulary(), Arrays.asList(referenceParser.getRuleNames()), referenceParser.getATN(), tokens);
1301 								}
1302 								else {
1303 									parser = parserCtor.newInstance(tokens);
1304 								}
1305 
1306 								DFA[] decisionToDFA = previousParser.getInterpreter().decisionToDFA;
1307 								if (COMPUTE_TRANSITION_STATS) {
1308 									parser.setInterpreter(new StatisticsParserATNSimulator(parser, parser.getATN(), decisionToDFA, parser.getInterpreter().getSharedContextCache()));
1309 								} else if (!REUSE_PARSER_DFA) {
1310 									parser.setInterpreter(new ParserATNSimulator(parser, parser.getATN(), decisionToDFA, parser.getInterpreter().getSharedContextCache()));
1311 								}
1312 
1313 								sharedParsers[thread] = parser;
1314 							}
1315 
1316 							parser.removeParseListeners();
1317 							parser.removeErrorListeners();
1318 							parser.addErrorListener(DescriptiveErrorListener.INSTANCE);
1319 							parser.addErrorListener(new SummarizingDiagnosticErrorListener());
1320 							parser.getInterpreter().setPredictionMode(PredictionMode.LL);
1321 							parser.setBuildParseTree(BUILD_PARSE_TREES);
1322 							if (COMPUTE_CHECKSUM && !BUILD_PARSE_TREES) {
1323 								parser.addParseListener(new ChecksumParseTreeListener(checksum));
1324 							}
1325 							if (!BUILD_PARSE_TREES && BLANK_LISTENER) {
1326 								parser.addParseListener(listener);
1327 							}
1328 							if (BAIL_ON_ERROR) {
1329 								parser.setErrorHandler(new BailErrorStrategy());
1330 							}
1331 
1332 							parseResult = parseMethod.invoke(parser);
1333 						}
1334 
1335 						assertThat(parseResult, instanceOf(ParseTree.class));
1336 						if (COMPUTE_CHECKSUM && BUILD_PARSE_TREES) {
1337 							ParseTreeWalker.DEFAULT.walk(new ChecksumParseTreeListener(checksum), (ParseTree)parseResult);
1338 						}
1339                         if (BUILD_PARSE_TREES && BLANK_LISTENER) {
1340                             ParseTreeWalker.DEFAULT.walk(listener, (ParseTree)parseResult);
1341                         }
1342 
1343 						return new FileParseResult(input.getSourceName(), (int)checksum.getValue(), (ParseTree)parseResult, tokens.size(), TIME_PARSE_ONLY ? parseStartTime : startTime, lexer, parser);
1344                     } catch (Exception e) {
1345 						if (!REPORT_SYNTAX_ERRORS && e instanceof ParseCancellationException) {
1346 							return new FileParseResult("unknown", (int)checksum.getValue(), null, 0, startTime, null, null);
1347 						}
1348 
1349                         e.printStackTrace(System.out);
1350                         throw new IllegalStateException(e);
1351                     }
1352                 }
1353             };
1354         } catch (Exception e) {
1355             e.printStackTrace(System.out);
1356             Assert.fail(e.getMessage());
1357             throw new IllegalStateException(e);
1358         }
1359     }
1360 
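    /** Parses a single input file on the given pass and worker thread, returning its measurements. */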
1361     protected interface ParserFactory {
1362         FileParseResult parseFile(CharStream input, int currentPass, int thread);
1363     }
1364 
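	/**
	 * Record of one file parse: source name, parse checksum, token count,
	 * start/end timestamps (nanoseconds), lexer and parser DFA sizes, and
	 * per-decision transition counts when the statistics simulators are installed.
	 */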
1365 	protected static class FileParseResult {
1366 		public final String sourceName;
1367 		public final int checksum;
1368 		public final ParseTree parseTree;
1369 		public final int tokenCount;
1370 		public final long startTime;
1371 		public final long endTime;
1372 
1373 		public final int lexerDFASize;
1374 		public final long lexerTotalTransitions;
1375 		public final long lexerComputedTransitions;
1376 
1377 		public final int parserDFASize;
1378 		public final long[] decisionInvocations;
1379 		public final long[] fullContextFallback;
1380 		public final long[] nonSll;
1381 		public final long[] parserTotalTransitions;
1382 		public final long[] parserComputedTransitions;
1383 		public final long[] parserFullContextTransitions;
1384 
1385 		public FileParseResult(String sourceName, int checksum, ParseTree parseTree, int tokenCount, long startTime, Lexer lexer, Parser parser) {
1386 			this.sourceName = sourceName;
1387 			this.checksum = checksum;
1388 			this.parseTree = parseTree;
1389 			this.tokenCount = tokenCount;
1390 			this.startTime = startTime;
1391 			this.endTime = System.nanoTime();
1392 
1393 			if (lexer != null) {
1394 				LexerATNSimulator interpreter = lexer.getInterpreter();
1395 				if (interpreter instanceof StatisticsLexerATNSimulator) {
1396 					lexerTotalTransitions = ((StatisticsLexerATNSimulator)interpreter).totalTransitions;
1397 					lexerComputedTransitions = ((StatisticsLexerATNSimulator)interpreter).computedTransitions;
1398 				} else {
1399 					lexerTotalTransitions = 0;
1400 					lexerComputedTransitions = 0;
1401 				}
1402 
1403 				int dfaSize = 0;
1404 				for (DFA dfa : interpreter.decisionToDFA) {
1405 					if (dfa != null) {
1406 						dfaSize += dfa.states.size();
1407 					}
1408 				}
1409 
1410 				lexerDFASize = dfaSize;
1411 			} else {
1412 				lexerDFASize = 0;
1413 				lexerTotalTransitions = 0;
1414 				lexerComputedTransitions = 0;
1415 			}
1416 
1417 			if (parser != null) {
1418 				ParserATNSimulator interpreter = parser.getInterpreter();
1419 				if (interpreter instanceof StatisticsParserATNSimulator) {
1420 					decisionInvocations = ((StatisticsParserATNSimulator)interpreter).decisionInvocations;
1421 					fullContextFallback = ((StatisticsParserATNSimulator)interpreter).fullContextFallback;
1422 					nonSll = ((StatisticsParserATNSimulator)interpreter).nonSll;
1423 					parserTotalTransitions = ((StatisticsParserATNSimulator)interpreter).totalTransitions;
1424 					parserComputedTransitions = ((StatisticsParserATNSimulator)interpreter).computedTransitions;
1425 					parserFullContextTransitions = ((StatisticsParserATNSimulator)interpreter).fullContextTransitions;
1426 				} else {
1427 					decisionInvocations = new long[0];
1428 					fullContextFallback = new long[0];
1429 					nonSll = new long[0];
1430 					parserTotalTransitions = new long[0];
1431 					parserComputedTransitions = new long[0];
1432 					parserFullContextTransitions = new long[0];
1433 				}
1434 
1435 				int dfaSize = 0;
1436 				for (DFA dfa : interpreter.decisionToDFA) {
1437 					if (dfa != null) {
1438 						dfaSize += dfa.states.size();
1439 					}
1440 				}
1441 
1442 				parserDFASize = dfaSize;
1443 			} else {
1444 				parserDFASize = 0;
1445 				decisionInvocations = new long[0];
1446 				fullContextFallback = new long[0];
1447 				nonSll = new long[0];
1448 				parserTotalTransitions = new long[0];
1449 				parserComputedTransitions = new long[0];
1450 				parserFullContextTransitions = new long[0];
1451 			}
1452 		}
1453 	}
1454 
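	/**
	 * A {@link LexerATNSimulator} that counts DFA transitions:
	 * {@link #totalTransitions} is bumped for every target-state lookup and
	 * {@link #computedTransitions} for every target state that has to be
	 * computed from the ATN.
	 */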
1455 	private static class StatisticsLexerATNSimulator extends LexerATNSimulator {
1456 
1457 		public long totalTransitions;
1458 		public long computedTransitions;
1459 
1460 		public StatisticsLexerATNSimulator(ATN atn, DFA[] decisionToDFA, PredictionContextCache sharedContextCache) {
1461 			super(atn, decisionToDFA, sharedContextCache);
1462 		}
1463 
1464 		public StatisticsLexerATNSimulator(Lexer recog, ATN atn, DFA[] decisionToDFA, PredictionContextCache sharedContextCache) {
1465 			super(recog, atn, decisionToDFA, sharedContextCache);
1466 		}
1467 
1468 		@Override
1469 		protected DFAState getExistingTargetState(DFAState s, int t) {
1470 			totalTransitions++;
1471 			return super.getExistingTargetState(s, t);
1472 		}
1473 
1474 		@Override
1475 		protected DFAState computeTargetState(CharStream input, DFAState s, int t) {
1476 			computedTransitions++;
1477 			return super.computeTargetState(input, s, t);
1478 		}
1479 	}
1480 
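	/**
	 * A {@link ParserATNSimulator} that records, per decision, how often the
	 * decision is invoked, how often it falls back to full-context prediction,
	 * and how many DFA transitions are followed versus computed.
	 * {@link #nonSll} is filled in by {@link SummarizingDiagnosticErrorListener}
	 * when SLL and LL predictions disagree.
	 */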
1481 	private static class StatisticsParserATNSimulator extends ParserATNSimulator {
1482 
1483 		public final long[] decisionInvocations;
1484 		public final long[] fullContextFallback;
1485 		public final long[] nonSll;
1486 		public final long[] totalTransitions;
1487 		public final long[] computedTransitions;
1488 		public final long[] fullContextTransitions;
1489 
1490 		private int decision;
1491 
1492 		public StatisticsParserATNSimulator(ATN atn, DFA[] decisionToDFA, PredictionContextCache sharedContextCache) {
1493 			super(atn, decisionToDFA, sharedContextCache);
1494 			decisionInvocations = new long[atn.decisionToState.size()];
1495 			fullContextFallback = new long[atn.decisionToState.size()];
1496 			nonSll = new long[atn.decisionToState.size()];
1497 			totalTransitions = new long[atn.decisionToState.size()];
1498 			computedTransitions = new long[atn.decisionToState.size()];
1499 			fullContextTransitions = new long[atn.decisionToState.size()];
1500 		}
1501 
1502 		public StatisticsParserATNSimulator(Parser parser, ATN atn, DFA[] decisionToDFA, PredictionContextCache sharedContextCache) {
1503 			super(parser, atn, decisionToDFA, sharedContextCache);
1504 			decisionInvocations = new long[atn.decisionToState.size()];
1505 			fullContextFallback = new long[atn.decisionToState.size()];
1506 			nonSll = new long[atn.decisionToState.size()];
1507 			totalTransitions = new long[atn.decisionToState.size()];
1508 			computedTransitions = new long[atn.decisionToState.size()];
1509 			fullContextTransitions = new long[atn.decisionToState.size()];
1510 		}
1511 
1512 		@Override
1513 		public int adaptivePredict(TokenStream input, int decision, ParserRuleContext outerContext) {
1514 			try {
1515 				this.decision = decision;
1516 				decisionInvocations[decision]++;
1517 				return super.adaptivePredict(input, decision, outerContext);
1518 			}
1519 			finally {
1520 				this.decision = -1;
1521 			}
1522 		}
1523 
1524 		@Override
1525 		protected int execATNWithFullContext(DFA dfa, DFAState D, ATNConfigSet s0, TokenStream input, int startIndex, ParserRuleContext outerContext) {
1526 			fullContextFallback[decision]++;
1527 			return super.execATNWithFullContext(dfa, D, s0, input, startIndex, outerContext);
1528 		}
1529 
1530 		@Override
1531 		protected DFAState getExistingTargetState(DFAState previousD, int t) {
1532 			totalTransitions[decision]++;
1533 			return super.getExistingTargetState(previousD, t);
1534 		}
1535 
1536 		@Override
1537 		protected DFAState computeTargetState(DFA dfa, DFAState previousD, int t) {
1538 			computedTransitions[decision]++;
1539 			return super.computeTargetState(dfa, previousD, t);
1540 		}
1541 
1542 		@Override
1543 		protected ATNConfigSet computeReachSet(ATNConfigSet closure, int t, boolean fullCtx) {
1544 			if (fullCtx) {
1545 				totalTransitions[decision]++;
1546 				computedTransitions[decision]++;
1547 				fullContextTransitions[decision]++;
1548 			}
1549 
1550 			return super.computeReachSet(closure, t, fullCtx);
1551 		}
1552 	}
1553 
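	/**
	 * Prints each syntax error to {@code System.err}, prefixed with the source
	 * name, line, and column, unless {@code REPORT_SYNTAX_ERRORS} is disabled.
	 */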
1554 	private static class DescriptiveErrorListener extends BaseErrorListener {
		public static final DescriptiveErrorListener INSTANCE = new DescriptiveErrorListener();
1556 
1557 		@Override
1558 		public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol,
1559 								int line, int charPositionInLine,
1560 								String msg, RecognitionException e)
1561 		{
1562 			if (!REPORT_SYNTAX_ERRORS) {
1563 				return;
1564 			}
1565 
1566 			String sourceName = recognizer.getInputStream().getSourceName();
1567 			if (!sourceName.isEmpty()) {
1568 				sourceName = String.format("%s:%d:%d: ", sourceName, line, charPositionInLine);
1569 			}
1570 
1571 			System.err.println(sourceName+"line "+line+":"+charPositionInLine+" "+msg);
1572 		}
1573 
1574 	}
1575 
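	/**
	 * A {@link DiagnosticErrorListener} that remembers the most recent SLL
	 * conflict so it can count decisions where SLL and LL prediction disagree,
	 * and that filters its ambiguity, full-context, and context-sensitivity
	 * reports through the corresponding REPORT_* flags.
	 */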
1576 	private static class SummarizingDiagnosticErrorListener extends DiagnosticErrorListener {
1577 		private BitSet _sllConflict;
1578 		private ATNConfigSet _sllConfigs;
1579 
1580 		@Override
1581 		public void reportAmbiguity(Parser recognizer, DFA dfa, int startIndex, int stopIndex, boolean exact, BitSet ambigAlts, ATNConfigSet configs) {
1582 			if (COMPUTE_TRANSITION_STATS && DETAILED_DFA_STATE_STATS) {
1583 				BitSet sllPredictions = getConflictingAlts(_sllConflict, _sllConfigs);
1584 				int sllPrediction = sllPredictions.nextSetBit(0);
1585 				BitSet llPredictions = getConflictingAlts(ambigAlts, configs);
1586 				int llPrediction = llPredictions.cardinality() == 0 ? ATN.INVALID_ALT_NUMBER : llPredictions.nextSetBit(0);
1587 				if (sllPrediction != llPrediction) {
1588 					((StatisticsParserATNSimulator)recognizer.getInterpreter()).nonSll[dfa.decision]++;
1589 				}
1590 			}
1591 
1592 			if (!REPORT_AMBIGUITIES) {
1593 				return;
1594 			}
1595 
1596 			// show the rule name along with the decision
1597 			String format = "reportAmbiguity d=%d (%s): ambigAlts=%s, input='%s'";
1598 			int decision = dfa.decision;
1599 			String rule = recognizer.getRuleNames()[dfa.atnStartState.ruleIndex];
1600 			String input = recognizer.getTokenStream().getText(Interval.of(startIndex, stopIndex));
1601 			recognizer.notifyErrorListeners(String.format(format, decision, rule, ambigAlts, input));
1602 		}
1603 
1604 		@Override
1605 		public void reportAttemptingFullContext(Parser recognizer, DFA dfa, int startIndex, int stopIndex, BitSet conflictingAlts, ATNConfigSet configs) {
1606 			_sllConflict = conflictingAlts;
1607 			_sllConfigs = configs;
1608 			if (!REPORT_FULL_CONTEXT) {
1609 				return;
1610 			}
1611 
1612 			// show the rule name and viable configs along with the base info
1613 			String format = "reportAttemptingFullContext d=%d (%s), input='%s', viable=%s";
1614 			int decision = dfa.decision;
1615 			String rule = recognizer.getRuleNames()[dfa.atnStartState.ruleIndex];
1616 			String input = recognizer.getTokenStream().getText(Interval.of(startIndex, stopIndex));
1617 			BitSet representedAlts = getConflictingAlts(conflictingAlts, configs);
1618 			recognizer.notifyErrorListeners(String.format(format, decision, rule, input, representedAlts));
1619 		}
1620 
1621 		@Override
1622 		public void reportContextSensitivity(Parser recognizer, DFA dfa, int startIndex, int stopIndex, int prediction, ATNConfigSet configs) {
1623 			if (COMPUTE_TRANSITION_STATS && DETAILED_DFA_STATE_STATS) {
1624 				BitSet sllPredictions = getConflictingAlts(_sllConflict, _sllConfigs);
1625 				int sllPrediction = sllPredictions.nextSetBit(0);
1626 				if (sllPrediction != prediction) {
1627 					((StatisticsParserATNSimulator)recognizer.getInterpreter()).nonSll[dfa.decision]++;
1628 				}
1629 			}
1630 
1631 			if (!REPORT_CONTEXT_SENSITIVITY) {
1632 				return;
1633 			}
1634 
1635 			// show the rule name and viable configs along with the base info
1636 			String format = "reportContextSensitivity d=%d (%s), input='%s', viable={%d}";
1637 			int decision = dfa.decision;
1638 			String rule = recognizer.getRuleNames()[dfa.atnStartState.ruleIndex];
1639 			String input = recognizer.getTokenStream().getText(Interval.of(startIndex, stopIndex));
1640 			recognizer.notifyErrorListeners(String.format(format, decision, rule, input, prediction));
1641 		}
1642 
1643 	}
1644 
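	/**
	 * Factory for composable {@link FilenameFilter}s (match by extension or
	 * exact name, plus all/any/none/not combinators) used to select which
	 * source files a pass parses.
	 */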
1645 	protected static final class FilenameFilters {
1646 		public static final FilenameFilter ALL_FILES = new FilenameFilter() {
1647 
1648 			@Override
1649 			public boolean accept(File dir, String name) {
1650 				return true;
1651 			}
1652 
1653 		};
1654 
1655 		public static FilenameFilter extension(String extension) {
1656 			return extension(extension, true);
1657 		}
1658 
1659 		public static FilenameFilter extension(String extension, boolean caseSensitive) {
1660 			return new FileExtensionFilenameFilter(extension, caseSensitive);
1661 		}
1662 
1663 		public static FilenameFilter name(String filename) {
1664 			return name(filename, true);
1665 		}
1666 
1667 		public static FilenameFilter name(String filename, boolean caseSensitive) {
1668 			return new FileNameFilenameFilter(filename, caseSensitive);
1669 		}
1670 
1671 		public static FilenameFilter all(FilenameFilter... filters) {
1672 			return new AllFilenameFilter(filters);
1673 		}
1674 
1675 		public static FilenameFilter any(FilenameFilter... filters) {
1676 			return new AnyFilenameFilter(filters);
1677 		}
1678 
1679 		public static FilenameFilter none(FilenameFilter... filters) {
1680 			return not(any(filters));
1681 		}
1682 
1683 		public static FilenameFilter not(FilenameFilter filter) {
1684 			return new NotFilenameFilter(filter);
1685 		}
1686 
1687 		private FilenameFilters() {
1688 		}
1689 
1690 		protected static class FileExtensionFilenameFilter implements FilenameFilter {
1691 
1692 			private final String extension;
1693 			private final boolean caseSensitive;
1694 
1695 			public FileExtensionFilenameFilter(String extension, boolean caseSensitive) {
1696 				if (!extension.startsWith(".")) {
1697 					extension = '.' + extension;
1698 				}
1699 
1700 				this.extension = extension;
1701 				this.caseSensitive = caseSensitive;
1702 			}
1703 
1704 			@Override
1705 			public boolean accept(File dir, String name) {
1706 				if (caseSensitive) {
1707 					return name.endsWith(extension);
1708 				} else {
					return name.toLowerCase().endsWith(extension.toLowerCase());
1710 				}
1711 			}
1712 		}
1713 
1714 		protected static class FileNameFilenameFilter implements FilenameFilter {
1715 
1716 			private final String filename;
1717 			private final boolean caseSensitive;
1718 
1719 			public FileNameFilenameFilter(String filename, boolean caseSensitive) {
1720 				this.filename = filename;
1721 				this.caseSensitive = caseSensitive;
1722 			}
1723 
1724 			@Override
1725 			public boolean accept(File dir, String name) {
1726 				if (caseSensitive) {
1727 					return name.equals(filename);
1728 				} else {
					return name.equalsIgnoreCase(filename);
1730 				}
1731 			}
1732 		}
1733 
1734 		protected static class AllFilenameFilter implements FilenameFilter {
1735 
1736 			private final FilenameFilter[] filters;
1737 
1738 			public AllFilenameFilter(FilenameFilter[] filters) {
1739 				this.filters = filters;
1740 			}
1741 
1742 			@Override
1743 			public boolean accept(File dir, String name) {
1744 				for (FilenameFilter filter : filters) {
1745 					if (!filter.accept(dir, name)) {
1746 						return false;
1747 					}
1748 				}
1749 
1750 				return true;
1751 			}
1752 		}
1753 
1754 		protected static class AnyFilenameFilter implements FilenameFilter {
1755 
1756 			private final FilenameFilter[] filters;
1757 
1758 			public AnyFilenameFilter(FilenameFilter[] filters) {
1759 				this.filters = filters;
1760 			}
1761 
1762 			@Override
1763 			public boolean accept(File dir, String name) {
1764 				for (FilenameFilter filter : filters) {
1765 					if (filter.accept(dir, name)) {
1766 						return true;
1767 					}
1768 				}
1769 
1770 				return false;
1771 			}
1772 		}
1773 
1774 		protected static class NotFilenameFilter implements FilenameFilter {
1775 
1776 			private final FilenameFilter filter;
1777 
1778 			public NotFilenameFilter(FilenameFilter filter) {
1779 				this.filter = filter;
1780 			}
1781 
1782 			@Override
1783 			public boolean accept(File dir, String name) {
1784 				return !filter.accept(dir, name);
1785 			}
1786 		}
1787 	}
1788 
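	/**
	 * A worker thread that carries its index so per-thread shared state, such
	 * as the cached lexer and parser for that slot, can be addressed by thread
	 * number.
	 */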
1789 	protected static class NumberedThread extends Thread {
1790 		private final int threadNumber;
1791 
1792 		public NumberedThread(Runnable target, int threadNumber) {
1793 			super(target);
1794 			this.threadNumber = threadNumber;
1795 		}
1796 
1797 		public final int getThreadNumber() {
1798 			return threadNumber;
1799 		}
1800 
1801 	}
1802 
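	/**
	 * Creates {@link NumberedThread}s with sequential indexes, asserting that
	 * no more than NUMBER_OF_THREADS threads are ever requested.
	 */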
1803 	protected static class NumberedThreadFactory implements ThreadFactory {
1804 		private final AtomicInteger nextThread = new AtomicInteger();
1805 
1806 		@Override
1807 		public Thread newThread(Runnable r) {
1808 			int threadNumber = nextThread.getAndIncrement();
1809 			assert threadNumber < NUMBER_OF_THREADS;
1810 			return new NumberedThread(r, threadNumber);
1811 		}
1812 
1813 	}
1814 
1815 	protected static class FixedThreadNumberFactory implements ThreadFactory {
1816 		private final int threadNumber;
1817 
1818 		public FixedThreadNumberFactory(int threadNumber) {
1819 			this.threadNumber = threadNumber;
1820 		}
1821 
1822 		@Override
1823 		public Thread newThread(Runnable r) {
1824 			assert threadNumber < NUMBER_OF_THREADS;
1825 			return new NumberedThread(r, threadNumber);
1826 		}
1827 	}
1828 
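	/**
	 * Folds every parse event (rule entry/exit, terminal, error node) along
	 * with the associated tokens and rule indexes into a
	 * {@link MurmurHashChecksum}, so two parses of the same input can be
	 * compared for identical behavior.
	 */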
1829 	protected static class ChecksumParseTreeListener implements ParseTreeListener {
1830 		private static final int VISIT_TERMINAL = 1;
1831 		private static final int VISIT_ERROR_NODE = 2;
1832 		private static final int ENTER_RULE = 3;
1833 		private static final int EXIT_RULE = 4;
1834 
1835 		private final MurmurHashChecksum checksum;
1836 
1837 		public ChecksumParseTreeListener(MurmurHashChecksum checksum) {
1838 			this.checksum = checksum;
1839 		}
1840 
1841 		@Override
1842 		public void visitTerminal(TerminalNode node) {
1843 			checksum.update(VISIT_TERMINAL);
1844 			updateChecksum(checksum, node.getSymbol());
1845 		}
1846 
1847 		@Override
1848 		public void visitErrorNode(ErrorNode node) {
1849 			checksum.update(VISIT_ERROR_NODE);
1850 			updateChecksum(checksum, node.getSymbol());
1851 		}
1852 
1853 		@Override
1854 		public void enterEveryRule(ParserRuleContext ctx) {
1855 			checksum.update(ENTER_RULE);
1856 			updateChecksum(checksum, ctx.getRuleIndex());
1857 			updateChecksum(checksum, ctx.getStart());
1858 		}
1859 
1860 		@Override
1861 		public void exitEveryRule(ParserRuleContext ctx) {
1862 			checksum.update(EXIT_RULE);
1863 			updateChecksum(checksum, ctx.getRuleIndex());
1864 			updateChecksum(checksum, ctx.getStop());
1865 		}
1866 
1867 	}
1868 
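	/**
	 * Holds the path of a source file and caches its contents behind a
	 * {@link SoftReference} (or a {@link StrongReference} when PRELOAD_SOURCES
	 * is set), handing out a fresh stream copy for each parse.
	 */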
1869 	protected static final class InputDescriptor {
1870 		private final String source;
1871 		private Reference<CloneableANTLRFileStream> inputStream;
1872 
1873 		public InputDescriptor(String source) {
1874 			this.source = source;
1875 			if (PRELOAD_SOURCES) {
1876 				getInputStream();
1877 			}
1878 		}
1879 
1881 		public synchronized CharStream getInputStream() {
1882 			CloneableANTLRFileStream stream = inputStream != null ? inputStream.get() : null;
1883 			if (stream == null) {
1884 				try {
1885 					stream = new CloneableANTLRFileStream(source, ENCODING);
1886 				} catch (IOException ex) {
1887 					throw new RuntimeException(ex);
1888 				}
1889 
1890 				if (PRELOAD_SOURCES) {
1891 					inputStream = new StrongReference<CloneableANTLRFileStream>(stream);
1892 				} else {
1893 					inputStream = new SoftReference<CloneableANTLRFileStream>(stream);
1894 				}
1895 			}
1896 
1897 			return new JavaUnicodeInputStream(stream.createCopy());
1898 		}
1899 	}
1900 
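	/**
	 * An {@link ANTLRFileStream} that exposes a copy of its character buffer as
	 * a new {@link ANTLRInputStream}, so the file is read from disk once but
	 * every parse gets an independent stream over the same data.
	 */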
1901 	protected static class CloneableANTLRFileStream extends ANTLRFileStream {
1902 
1903 		public CloneableANTLRFileStream(String fileName, String encoding) throws IOException {
1904 			super(fileName, encoding);
1905 		}
1906 
1907 		public ANTLRInputStream createCopy() {
1908 			ANTLRInputStream stream = new ANTLRInputStream(this.data, this.n);
1909 			stream.name = this.getSourceName();
1910 			return stream;
1911 		}
1912 	}
1913 
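	/**
	 * A {@link Reference} that never releases its referent: {@link #get()}
	 * returns the strongly held field rather than the weak superclass referent.
	 * Used to pin preloaded sources in memory.
	 */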
1914 	public static class StrongReference<T> extends WeakReference<T> {
1915 		public final T referent;
1916 
1917 		public StrongReference(T referent) {
1918 			super(referent);
1919 			this.referent = referent;
1920 		}
1921 
1922 		@Override
1923 		public T get() {
1924 			return referent;
1925 		}
1926 	}
1927 
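	/**
	 * Incremental MurmurHash over a sequence of int values; {@link #getValue()}
	 * finalizes the hash with the number of values seen so far.
	 */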
1928 	private static class MurmurHashChecksum {
1929 		private int value;
1930 		private int count;
1931 
1932 		public MurmurHashChecksum() {
1933 			this.value = MurmurHash.initialize();
1934 		}
1935 
1936 		public void update(int value) {
1937 			this.value = MurmurHash.update(this.value, value);
1938 			this.count++;
1939 		}
1940 
1941 		public int getValue() {
1942 			return MurmurHash.finish(value, count);
1943 		}
1944 	}
1945 
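	/**
	 * Generates 20 levels of parser grammars in which every grammar imports
	 * both grammars of the next level, then runs the tool on the root grammar.
	 * Naive import handling would be exponential in the depth; the 20 second
	 * timeout guards against such a regression.
	 */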
1946 	@Test(timeout = 20000)
1947 	public void testExponentialInclude() {
1948 		String grammarFormat =
1949 			"parser grammar Level_%d_%d;\n" +
1950 			"\n" +
1951 			"%s import Level_%d_1, Level_%d_2;\n" +
1952 			"\n" +
1953 			"rule_%d_%d : EOF;\n";
1954 
1955 		BaseRuntimeTest.mkdir(tmpdir);
1956 
1957 		long startTime = System.nanoTime();
1958 
1959 		int levels = 20;
1960 		for (int level = 0; level < levels; level++) {
1961 			String leafPrefix = level == levels - 1 ? "//" : "";
1962 			String grammar1 = String.format(grammarFormat, level, 1, leafPrefix, level + 1, level + 1, level, 1);
1963 			writeFile(tmpdir, "Level_" + level + "_1.g4", grammar1);
1964 			if (level > 0) {
				String grammar2 = String.format(grammarFormat, level, 2, leafPrefix, level + 1, level + 1, level, 2);
1966 				writeFile(tmpdir, "Level_" + level + "_2.g4", grammar2);
1967 			}
1968 		}
1969 
1970 		ErrorQueue equeue = BaseRuntimeTest.antlrOnString(tmpdir, "Java", "Level_0_1.g4", false);
1971 		Assert.assertTrue(equeue.errors.isEmpty());
1972 
1973 		long endTime = System.nanoTime();
1974 		System.out.format("%s milliseconds.%n", (endTime - startTime) / 1000000.0);
1975 	}
1976 }
1977