// org.antlr.Tool
/*
* [The "BSD license"]
* Copyright (c) 2010 Terence Parr
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.antlr;
import antlr.ANTLRException;
import org.antlr.analysis.*;
import org.antlr.codegen.CodeGenerator;
import org.antlr.misc.Graph;
import org.antlr.runtime.misc.Stats;
import org.antlr.tool.*;
import java.io.*;
import java.util.*;
/** The main ANTLR entry point. Read a grammar and generate a parser. */
public class Tool {
/** Settings loaded from org/antlr/antlr.properties (see loadResources()). */
public final Properties antlrSettings = new Properties();

/** Tool version; replaced by the antlr.version property when the resource loads. */
public String VERSION = "!Unknown version!";
//public static final String VERSION = "${project.version}";

public static final String UNINITIALIZED_DIR = "";

// Grammar files named on the command line, later sorted into build order.
// Restored the <String> type argument that was lost in extraction: the
// for-each loops in process()/sortGrammarFiles() bind elements as String.
private List<String> grammarFileNames = new ArrayList<String>();

// -nfa / -dfa: emit DOT files for the NFA/DFA of each decision.
private boolean generate_NFA_dot = false;
private boolean generate_DFA_dot = false;

// Output/input/library directory handling (-o/-fo, -lib).
private String outputDirectory = ".";
private boolean haveOutputDir = false;
private String inputDirectory = null;
private String parentGrammarDirectory;
private String grammarOutputDirectory;
private boolean haveInputDir = false;
private String libDirectory = ".";

// Plain command-line switches.
private boolean debug = false;
private boolean trace = false;
private boolean profile = false;
private boolean report = false;
private boolean printGrammar = false;
private boolean depend = false;
private boolean forceAllFilesToOutputDir = false;
private boolean forceRelativeOutput = false;
// When true (default) the implicitly generated lexer grammar file for a
// combined grammar is deleted after code generation (-Xsavelexer keeps it).
protected boolean deleteTempLexer = true;
private boolean verbose = false;

/** Don't process grammar file if generated files are newer than grammar */
private boolean make = false;
private boolean showBanner = true;

// Set by -version so main() skips processing entirely.
private static boolean exitNow = false;

// The internal options are for my use on the command line during dev
//
public static boolean internalOption_PrintGrammarTree = false;
public static boolean internalOption_PrintDFA = false;
public static boolean internalOption_ShowNFAConfigsInDFA = false;
public static boolean internalOption_watchNFAConversion = false;

/**
 * A list of dependency generators that are accumulated as (and if) the
 * tool is required to sort the provided grammars into build dependency order.
protected Map buildDependencyGenerators;
 */
/**
 * Command-line entry point: build a Tool from the arguments, run it, and
 * exit with status 1 when any errors were reported, 0 otherwise.
 * When -version was given, exitNow is set and processing is skipped.
 */
public static void main(String[] args) {
    Tool antlr = new Tool(args);
    if (exitNow) {
        return; // -version (or similar) already handled everything
    }
    antlr.process();
    // Non-zero exit status signals failure to build tools such as Maven.
    System.exit(ErrorManager.getNumErrors() > 0 ? 1 : 0);
}
/**
 * Load the properties file org/antlr/antlr.properties and populate any
 * variables that must be initialized from it, such as the version of ANTLR.
 * If the resource is absent or unreadable, the compiled-in defaults stand.
 */
private void loadResources() {
    InputStream in = this.getClass().getResourceAsStream("antlr.properties");
    // If we found the resource, then load it, otherwise revert to the
    // defaults.
    if (in != null) {
        try {
            // Load the resources into the map
            antlrSettings.load(in);
            // Set any variables that we need to populate from the resources.
            // Guard against a missing key: the original assigned the raw
            // getProperty() result, clobbering the default VERSION with null.
            String version = antlrSettings.getProperty("antlr.version");
            if (version != null) {
                VERSION = version;
            }
        } catch (Exception ignored) {
            // Deliberate best-effort: leave the defaults in place.
        } finally {
            // Always release the resource stream (the original leaked it).
            try {
                in.close();
            } catch (IOException ignored) {
                // Nothing useful to do on close failure.
            }
        }
    }
}
/** Create a tool with default options; version info is read from antlr.properties. */
public Tool() {
    loadResources();
}
/**
 * Create a tool configured from command-line arguments.
 *
 * @param args the command-line arguments, as passed to {@link #main}
 */
public Tool(String[] args) {
    loadResources();
    // Set all the options and pick up all the named grammar files
    //
    processArgs(args);
}
/**
 * Parse the command-line arguments, setting the corresponding tool options
 * and collecting every non-option argument as a grammar file name.
 * Option values are consumed by advancing {@code i} inside a branch, so the
 * per-branch {@code i++} placement is significant. Missing option values are
 * reported to stderr and the option is ignored rather than aborting.
 *
 * @param args the raw command-line arguments; null or empty prints help
 */
public void processArgs(String[] args) {
    if (isVerbose()) {
        ErrorManager.info("ANTLR Parser Generator Version " + VERSION);
        showBanner = false;
    }
    // No arguments at all: show usage and bail out.
    if (args == null || args.length == 0) {
        help();
        return;
    }
    for (int i = 0; i < args.length; i++) {
        if (args[i].equals("-o") || args[i].equals("-fo")) {
            if (i + 1 >= args.length) {
                System.err.println("missing output directory with -fo/-o option; ignoring");
            }
            else {
                if (args[i].equals("-fo")) { // force output into dir
                    setForceAllFilesToOutputDir(true);
                }
                i++; // consume the directory value
                outputDirectory = args[i];
                // Strip a single trailing path separator.
                if (outputDirectory.endsWith("/") ||
                    outputDirectory.endsWith("\\")) {
                    outputDirectory =
                        outputDirectory.substring(0, getOutputDirectory().length() - 1);
                }
                File outDir = new File(outputDirectory);
                haveOutputDir = true;
                if (outDir.exists() && !outDir.isDirectory()) {
                    ErrorManager.error(ErrorManager.MSG_OUTPUT_DIR_IS_FILE, outputDirectory);
                    // NOTE(review): resetting the *library* directory here looks
                    // like a copy/paste from the -lib branch below — one would
                    // expect the output directory to be reset instead. Confirm
                    // intent before changing.
                    setLibDirectory(".");
                }
            }
        }
        else if (args[i].equals("-lib")) {
            if (i + 1 >= args.length) {
                System.err.println("missing library directory with -lib option; ignoring");
            }
            else {
                i++; // consume the directory value
                setLibDirectory(args[i]);
                // Strip a single trailing path separator.
                if (getLibraryDirectory().endsWith("/") ||
                    getLibraryDirectory().endsWith("\\")) {
                    setLibDirectory(getLibraryDirectory().substring(0, getLibraryDirectory().length() - 1));
                }
                File outDir = new File(getLibraryDirectory());
                if (!outDir.exists()) {
                    ErrorManager.error(ErrorManager.MSG_DIR_NOT_FOUND, getLibraryDirectory());
                    setLibDirectory(".");
                }
            }
        }
        else if (args[i].equals("-nfa")) {
            setGenerate_NFA_dot(true);
        }
        else if (args[i].equals("-dfa")) {
            setGenerate_DFA_dot(true);
        }
        else if (args[i].equals("-debug")) {
            setDebug(true);
        }
        else if (args[i].equals("-trace")) {
            setTrace(true);
        }
        else if (args[i].equals("-report")) {
            setReport(true);
        }
        else if (args[i].equals("-profile")) {
            setProfile(true);
        }
        else if (args[i].equals("-print")) {
            setPrintGrammar(true);
        }
        else if (args[i].equals("-depend")) {
            setDepend(true);
        }
        else if (args[i].equals("-verbose")) {
            setVerbose(true);
        }
        else if (args[i].equals("-version")) {
            version();
            exitNow = true; // main() will skip process()
        }
        else if (args[i].equals("-make")) {
            setMake(true);
        }
        else if (args[i].equals("-message-format")) {
            if (i + 1 >= args.length) {
                System.err.println("missing output format with -message-format option; using default");
            }
            else {
                i++;
                ErrorManager.setFormat(args[i]);
            }
        }
        // -X* options below are internal/extended options.
        else if (args[i].equals("-Xgrtree")) {
            internalOption_PrintGrammarTree = true; // print grammar tree
        }
        else if (args[i].equals("-Xdfa")) {
            internalOption_PrintDFA = true;
        }
        else if (args[i].equals("-Xnoprune")) {
            DFAOptimizer.PRUNE_EBNF_EXIT_BRANCHES = false;
        }
        else if (args[i].equals("-Xnocollapse")) {
            DFAOptimizer.COLLAPSE_ALL_PARALLEL_EDGES = false;
        }
        else if (args[i].equals("-Xdbgconversion")) {
            NFAToDFAConverter.debug = true;
        }
        else if (args[i].equals("-Xmultithreaded")) {
            NFAToDFAConverter.SINGLE_THREADED_NFA_CONVERSION = false;
        }
        else if (args[i].equals("-Xnomergestopstates")) {
            DFAOptimizer.MERGE_STOP_STATES = false;
        }
        else if (args[i].equals("-Xdfaverbose")) {
            internalOption_ShowNFAConfigsInDFA = true;
        }
        else if (args[i].equals("-Xwatchconversion")) {
            internalOption_watchNFAConversion = true;
        }
        else if (args[i].equals("-XdbgST")) {
            CodeGenerator.EMIT_TEMPLATE_DELIMITERS = true;
        }
        // NOTE(review): the numeric -X options below feed the raw argument to
        // Integer.parseInt with no try/catch, so a non-numeric value crashes
        // the tool with an uncaught NumberFormatException instead of an
        // "ignoring" message like the other malformed options.
        else if (args[i].equals("-Xmaxinlinedfastates")) {
            if (i + 1 >= args.length) {
                System.err.println("missing max inline dfa states -Xmaxinlinedfastates option; ignoring");
            }
            else {
                i++;
                CodeGenerator.MAX_ACYCLIC_DFA_STATES_INLINE = Integer.parseInt(args[i]);
            }
        }
        else if (args[i].equals("-Xmaxswitchcaselabels")) {
            if (i + 1 >= args.length) {
                System.err.println("missing max switch case labels -Xmaxswitchcaselabels option; ignoring");
            }
            else {
                i++;
                CodeGenerator.MAX_SWITCH_CASE_LABELS = Integer.parseInt(args[i]);
            }
        }
        else if (args[i].equals("-Xminswitchalts")) {
            if (i + 1 >= args.length) {
                System.err.println("missing min switch alternatives -Xminswitchalts option; ignoring");
            }
            else {
                i++;
                CodeGenerator.MIN_SWITCH_ALTS = Integer.parseInt(args[i]);
            }
        }
        else if (args[i].equals("-Xm")) {
            if (i + 1 >= args.length) {
                System.err.println("missing max recursion with -Xm option; ignoring");
            }
            else {
                i++;
                NFAContext.MAX_SAME_RULE_INVOCATIONS_PER_NFA_CONFIG_STACK = Integer.parseInt(args[i]);
            }
        }
        else if (args[i].equals("-Xmaxdfaedges")) {
            if (i + 1 >= args.length) {
                System.err.println("missing max number of edges with -Xmaxdfaedges option; ignoring");
            }
            else {
                i++;
                DFA.MAX_STATE_TRANSITIONS_FOR_TABLE = Integer.parseInt(args[i]);
            }
        }
        else if (args[i].equals("-Xconversiontimeout")) {
            if (i + 1 >= args.length) {
                System.err.println("missing max time in ms -Xconversiontimeout option; ignoring");
            }
            else {
                i++;
                DFA.MAX_TIME_PER_DFA_CREATION = Integer.parseInt(args[i]);
            }
        }
        else if (args[i].equals("-Xnfastates")) {
            DecisionProbe.verbose = true;
        }
        else if (args[i].equals("-Xsavelexer")) {
            deleteTempLexer = false;
        }
        else if (args[i].equals("-X")) {
            Xhelp();
        }
        else {
            // Anything not starting with '-' is taken as a grammar file;
            // unknown '-' options are silently ignored.
            if (args[i].charAt(0) != '-') {
                // Must be the grammar file
                addGrammarFile(args[i]);
            }
        }
    }
}
/*
protected void checkForInvalidArguments(String[] args, BitSet cmdLineArgValid) {
// check for invalid command line args
for (int a = 0; a < args.length; a++) {
if (!cmdLineArgValid.member(a)) {
System.err.println("invalid command-line argument: " + args[a] + "; ignored");
}
}
}
*/
/**
 * Checks to see if the list of outputFiles all exist, and have
 * last-modified timestamps which are later than the last-modified
 * timestamp of all the grammar files involved in building the output
 * (imports must be checked). If these conditions hold, the method
 * returns false, otherwise, it returns true.
 *
 * <p>Note that the input directory must be set for this to resolve the
 * grammar file relative to it.</p>
 *
 * @param grammarFileName The grammar file we are checking
 * @return true when (re)generation is needed, false when up to date
 * @throws IOException    if the dependency generator cannot read the grammar
 * @throws ANTLRException if the grammar cannot be analyzed
 */
public boolean buildRequired(String grammarFileName)
    throws IOException, ANTLRException
{
    BuildDependencyGenerator bd =
        new BuildDependencyGenerator(this, grammarFileName);
    // Restored the <File> type arguments lost in extraction: the enhanced-for
    // loops below bind elements as File, which does not compile over raw List.
    List<File> outputFiles = bd.getGeneratedFileList();
    List<File> inputFiles = bd.getDependenciesFileList();
    // Note that input directory must be set to use buildRequired
    File grammarFile;
    if (haveInputDir) {
        grammarFile = new File(inputDirectory, grammarFileName);
    }
    else {
        grammarFile = new File(grammarFileName);
    }
    // lastModified() is 0L for a missing file, which safely forces a rebuild
    // only via the !outputFile.exists() test below.
    long grammarLastModified = grammarFile.lastModified();
    for (File outputFile : outputFiles) {
        if (!outputFile.exists() || grammarLastModified > outputFile.lastModified()) {
            // One of the output files does not exist or is out of date, so we must build it
            return true;
        }
        // Check all of the imported grammars and see if any of these are younger
        // than any of the output files.
        if (inputFiles != null) {
            for (File inputFile : inputFiles) {
                if (inputFile.lastModified() > outputFile.lastModified()) {
                    // One of the imported grammar files has been updated so we must build
                    return true;
                }
            }
        }
    }
    if (isVerbose()) {
        System.out.println("Grammar " + grammarFile + " is up to date - build skipped");
    }
    return false;
}
/**
 * Process every grammar file collected from the command line: sort them
 * into build-dependency order, then for each one (unless -make decides it
 * is up to date) parse it, assign token types, build NFAs, and generate a
 * recognizer. For combined grammars, the implicitly generated lexer
 * grammar is written out, compiled the same way, and (by default) the
 * temporary lexer grammar file is deleted afterwards.
 */
public void process() {
    boolean exceptionWhenWritingLexerFile = false;
    String lexerGrammarFileName = null; // necessary at this scope to have access in the catch below
    // Have to be tricky here when Maven or build tools call in and must new Tool()
    // before setting options. The banner won't display that way!
    if (isVerbose() && showBanner) {
        ErrorManager.info("ANTLR Parser Generator Version " + VERSION);
        showBanner = false;
    }
    try {
        sortGrammarFiles(); // update grammarFileNames
    }
    catch (Exception e) {
        ErrorManager.error(ErrorManager.MSG_INTERNAL_ERROR,e);
    }
    catch (Error e) {
        // Also trap Errors so one broken grammar list does not kill the tool.
        ErrorManager.error(ErrorManager.MSG_INTERNAL_ERROR, e);
    }
    for (String grammarFileName : grammarFileNames) {
        // If we are in make mode (to support build tools like Maven) and the
        // file is already up to date, then we do not build it (and in verbose mode
        // we will say so).
        if (make) {
            try {
                if ( !buildRequired(grammarFileName) ) continue;
            }
            catch (Exception e) {
                ErrorManager.error(ErrorManager.MSG_INTERNAL_ERROR,e);
            }
        }
        if (isVerbose() && !isDepend()) {
            System.out.println(grammarFileName);
        }
        try {
            // -depend: only print the dependency list; no code generation.
            if (isDepend()) {
                BuildDependencyGenerator dep =
                    new BuildDependencyGenerator(this, grammarFileName);
                /*
                List outputFiles = dep.getGeneratedFileList();
                List dependents = dep.getDependenciesFileList();
                System.out.println("output: "+outputFiles);
                System.out.println("dependents: "+dependents);
                */
                System.out.println(dep.getDependencies());
                continue;
            }
            Grammar grammar = getRootGrammar(grammarFileName);
            // we now have all grammars read in as ASTs
            // (i.e., root and all delegates)
            grammar.composite.assignTokenTypes();
            grammar.composite.defineGrammarSymbols();
            grammar.composite.createNFAs();
            generateRecognizer(grammar);
            if (isPrintGrammar()) {
                grammar.printGrammar(System.out);
            }
            if (isReport()) {
                GrammarReport2 greport = new GrammarReport2(grammar);
                System.out.print(greport.toString());
                // GrammarReport greport = new GrammarReport(grammar);
                // System.out.println(greport.toString());
                // // print out a backtracking report too (that is not encoded into log)
                // System.out.println(greport.getBacktrackingReport());
            }
            if (isProfile()) {
                GrammarReport greport = new GrammarReport(grammar);
                Stats.writeReport(GrammarReport.GRAMMAR_STATS_FILENAME,
                                  greport.toNotifyString());
            }
            // now handle the lexer if one was created for a merged spec
            String lexerGrammarStr = grammar.getLexerGrammar();
            //System.out.println("lexer grammar:\n"+lexerGrammarStr);
            if (grammar.type == Grammar.COMBINED && lexerGrammarStr != null) {
                lexerGrammarFileName = grammar.getImplicitlyGeneratedLexerFileName();
                try {
                    // NOTE(review): if w.write() throws, the Writer is never
                    // closed (close() happens only on success) — consider a
                    // finally block here.
                    Writer w = getOutputFile(grammar, lexerGrammarFileName);
                    w.write(lexerGrammarStr);
                    w.close();
                }
                catch (IOException e) {
                    // emit different error message when creating the implicit lexer fails
                    // due to write permission error
                    exceptionWhenWritingLexerFile = true;
                    throw e;
                }
                try {
                    // Build and generate the implicit lexer from the in-memory
                    // grammar text we just wrote out.
                    StringReader sr = new StringReader(lexerGrammarStr);
                    Grammar lexerGrammar = new Grammar();
                    lexerGrammar.composite.watchNFAConversion = internalOption_watchNFAConversion;
                    lexerGrammar.implicitLexer = true;
                    lexerGrammar.setTool(this);
                    File lexerGrammarFullFile =
                        new File(getFileDirectory(lexerGrammarFileName), lexerGrammarFileName);
                    lexerGrammar.setFileName(lexerGrammarFullFile.toString());
                    lexerGrammar.importTokenVocabulary(grammar);
                    lexerGrammar.parseAndBuildAST(sr);
                    sr.close();
                    lexerGrammar.composite.assignTokenTypes();
                    lexerGrammar.composite.defineGrammarSymbols();
                    lexerGrammar.composite.createNFAs();
                    generateRecognizer(lexerGrammar);
                }
                finally {
                    // make sure we clean up
                    if (deleteTempLexer) {
                        File outputDir = getOutputDirectory(lexerGrammarFileName);
                        File outputFile = new File(outputDir, lexerGrammarFileName);
                        outputFile.delete();
                    }
                }
            }
        }
        catch (IOException e) {
            if (exceptionWhenWritingLexerFile) {
                ErrorManager.error(ErrorManager.MSG_CANNOT_WRITE_FILE,
                                   lexerGrammarFileName, e);
            }
            else {
                ErrorManager.error(ErrorManager.MSG_CANNOT_OPEN_FILE,
                                   grammarFileName);
            }
        }
        catch (Exception e) {
            ErrorManager.error(ErrorManager.MSG_INTERNAL_ERROR, grammarFileName, e);
        }
        /*
        finally {
        System.out.println("creates="+ Interval.creates);
        System.out.println("hits="+ Interval.hits);
        System.out.println("misses="+ Interval.misses);
        System.out.println("outOfRange="+ Interval.outOfRange);
        }
        */
    }
}
public void sortGrammarFiles() throws IOException {
//System.out.println("Grammar names "+getGrammarFileNames());
Graph g = new Graph();
List missingFiles = new ArrayList();
for (String gfile : grammarFileNames) {
try {
GrammarSpelunker grammar = new GrammarSpelunker(inputDirectory, gfile);
grammar.parse();
String vocabName = grammar.getTokenVocab();
String grammarName = grammar.getGrammarName();
// Make all grammars depend on any tokenVocab options
if ( vocabName!=null ) g.addEdge(gfile, vocabName+CodeGenerator.VOCAB_FILE_EXTENSION);
// Make all generated tokens files depend on their grammars
g.addEdge(grammarName+CodeGenerator.VOCAB_FILE_EXTENSION, gfile);
}
catch (FileNotFoundException fnfe) {
ErrorManager.error(ErrorManager.MSG_CANNOT_OPEN_FILE, gfile);
missingFiles.add(gfile);
}
}
List
© 2015 - 2025 Weber Informatics LLC | Privacy Policy