package org.etlunit.feature.database;

import org.etlunit.TestExecutionError;

import java.io.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
public class SQLAggregatorImpl implements SQLAggregator
{
    private final List<FileRef> lineList = new ArrayList<FileRef>();
    private final List<FileRef> statementList = new ArrayList<FileRef>();

    private final String text;

    public SQLAggregatorImpl(String ddlRef, SQLLocator locator, DatabaseConnection databaseConnection) throws IOException, TestExecutionError
    {
        this(makeText(ddlRef, locator, databaseConnection));
    }

    public SQLAggregatorImpl(String text) throws IOException, TestExecutionError
    {
        this.text = text;

        // take the output of the line processor and break it into individual
        // statements, since JDBC does not support multiple statements per execute
        splitLines();
    }
    private void splitLines()
    {
        // first split along line boundaries
        BufferedReader breader = new BufferedReader(new StringReader(text));

        String line = null;

        try
        {
            String thisRef = "synthetic";
            int thisLine = -1;
            int totalLines = 0;

            while ((line = breader.readLine()) != null)
            {
                totalLines++;

                DDLInstrumentationExpression ddlie = new DDLInstrumentationExpression(line);

                if (ddlie.matches())
                {
                    // this is an instrumentation marker - record the source ref and line, then discard it
                    thisRef = ddlie.getRef();
                    thisLine = ddlie.getLineNo();
                }
                else if (!line.trim().equals(""))
                {
                    lineList.add(new FileRefImpl(thisRef, thisLine, totalLines, line));
                }
            }
        }
        catch (IOException e)
        {
            // cannot happen - the reader is backed by an in-memory string
            throw new RuntimeException("Unexpected I/O error reading SQL text", e);
        }
        StringWriter stw = new StringWriter();
        PrintWriter prw = new PrintWriter(stw);
        StringBuffer buffer = stw.getBuffer();

        for (FileRef lineRec : lineList)
        {
            String [] sts = lineRec.getLine().split(";;", -1);

            // append the first segment - it continues whatever statement is in the buffer
            prw.print(sts[0]);

            for (int i = 1; i < sts.length; i++)
            {
                // each ';;' boundary terminates the statement currently in the buffer
                prw.flush();
                addStatement(statementList,
                        lineRec.getCurrentRefName(),
                        lineRec.getCurrentLineNumber(),
                        lineRec.getAggregatedLineNumber(),
                        stw.toString());
                buffer.setLength(0);

                // the last segment is either empty or the start of a new
                // statement, so keep it in the buffer
                if (i == (sts.length - 1) && !sts[i].trim().equals(""))
                {
                    prw.println(sts[i]);
                }
                else
                {
                    // otherwise the segment is a complete statement in its own right
                    addStatement(statementList,
                            lineRec.getCurrentRefName(),
                            lineRec.getCurrentLineNumber(),
                            lineRec.getAggregatedLineNumber(),
                            sts[i]);
                }
            }

            // append a newline before continuing with the next source line
            prw.println();
        }

        // anything left in the buffer is a final, unterminated statement
        prw.flush();

        if (buffer.length() != 0)
        {
            // attribute it to the last line processed
            FileRef lastRec = lineList.get(lineList.size() - 1);
            addStatement(statementList,
                    lastRec.getCurrentRefName(),
                    lastRec.getCurrentLineNumber(),
                    lastRec.getAggregatedLineNumber(),
                    stw.toString());
        }
    }
    private void addStatement(List<FileRef> statementList, String currentRefName, int currentLineNumber, int aggregatedLineNumber, String st)
    {
        // ignore whitespace-only fragments
        if (!st.trim().equals(""))
        {
            statementList.add(new FileRefImpl(currentRefName, currentLineNumber, aggregatedLineNumber, st));
        }
    }
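
    /*
     * Illustrative note (an assumption, not from the original source): given
     * the aggregated text
     *
     *     DROP TABLE t;;CREATE TABLE t (x INT);;SELECT *
     *     FROM t;;
     *
     * the statement aggregator yields "DROP TABLE t", "CREATE TABLE t (x INT)"
     * and a third statement spanning both lines, since text after the last
     * ';;' on a line continues into the following line.
     */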
    public static String makeText(String ddlRef, SQLLocator locator, DatabaseConnection databaseConnection) throws TestExecutionError, IOException
    {
        String ptext = locator.locate(ddlRef, databaseConnection);

        DDLDirectiveNameExpression ddldne = new DDLDirectiveNameExpression(ptext);

        Map<String, String> includedReferences = new HashMap<String, String>();
        Map<String, String> provideMap = new HashMap<String, String>();
        Map<String, String> requireMap = new HashMap<String, String>();

        while (ddldne.hasNext())
        {
            String subText = "";

            String directive = ddldne.getDirective();
            String ddlReference = ddldne.getDdlReference();

            // include pulls a reference in at most once per aggregation
            if (directive.equals("include"))
            {
                if (!includedReferences.containsKey(ddlReference))
                {
                    subText = locator.locate(ddlReference, databaseConnection);
                    includedReferences.put(ddlReference, "");
                }
            }
            // fetch is expanded verbatim every time it appears
            else if (directive.equals("fetch"))
            {
                subText = locator.locate(ddlReference, databaseConnection);
            }
            else if (directive.equals("require"))
            {
                requireMap.put(ddlReference, ddlReference);
            }
            else if (directive.equals("provide"))
            {
                provideMap.put(ddlReference, ddlReference);
            }
            else
            {
                throw new TestExecutionError("Directive '" + directive + "' not understood", DatabaseConstants.ERR_BAD_DIRECTIVE);
            }

            // splice the located sub-text in place of the directive, then rescan
            int startIndex = ddldne.start();
            int endIndex = ddldne.end();

            ptext = (startIndex == 0 ? "" : ptext.substring(0, startIndex - 1)) + subText + ptext.substring(endIndex);

            ddldne = new DDLDirectiveNameExpression(ptext);
        }

        // validate that every 'require' has been met by a matching 'provide'
        for (String require : requireMap.keySet())
        {
            if (!provideMap.containsKey(require))
            {
                throw new TestExecutionError("Script requires '" + require + "' but it was not provided", DatabaseConstants.ERR_UNMET_REQUIRE);
            }
        }

        return ptext;
    }
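
    /*
     * Illustrative note (an assumption, not from the original source): the
     * exact directive syntax is whatever DDLDirectiveNameExpression matches,
     * but conceptually a line such as "@include common-tables" is replaced
     * in-place by the located text of 'common-tables'. Because the result is
     * rescanned after every splice, included scripts may themselves carry
     * further directives.
     */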
    public String getText()
    {
        return text;
    }

    public Aggregator getLineAggregator()
    {
        return new Aggregator()
        {
            int index = 0;

            public boolean hasNext()
            {
                return index < lineList.size();
            }

            public FileRef next()
            {
                return lineList.get(index++);
            }
        };
    }

    public Aggregator getStatementAggregator()
    {
        return new Aggregator()
        {
            int index = 0;

            public boolean hasNext()
            {
                return index < statementList.size();
            }

            public FileRef next()
            {
                return statementList.get(index++);
            }
        };
    }
}
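
/*
 * A minimal usage sketch (an assumption, not part of the original source). It
 * feeds pre-aggregated text straight to the String constructor and walks the
 * resulting statements; only accessors already used by the class above are
 * called. With no instrumentation lines present, every statement is
 * attributed to the "synthetic" ref at line -1.
 */
class SQLAggregatorImplUsageSketch
{
    public static void main(String[] args) throws Exception
    {
        // two statements, each terminated by ';;'
        SQLAggregatorImpl aggregator = new SQLAggregatorImpl(
                "CREATE TABLE t (x INT);;\nINSERT INTO t VALUES (1);;\n");

        Aggregator statements = aggregator.getStatementAggregator();

        while (statements.hasNext())
        {
            FileRef statement = statements.next();
            System.out.println(statement.getCurrentRefName() + ":"
                    + statement.getCurrentLineNumber() + " -> " + statement.getLine());
        }
    }
}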