package org.bitbucket.bradleysmithllc.etlunit.feature.database;
/*
* #%L
* etlunit-database
* %%
* Copyright (C) 2010 - 2014 bradleysmithllc
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.JsonNodeType;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang3.ObjectUtils;
import org.bitbucket.bradleysmithllc.etlunit.*;
import org.bitbucket.bradleysmithllc.etlunit.context.VariableContext;
import org.bitbucket.bradleysmithllc.etlunit.feature.Feature;
import org.bitbucket.bradleysmithllc.etlunit.feature.RuntimeOption;
import org.bitbucket.bradleysmithllc.etlunit.feature._assert.AssertFeatureModule;
import org.bitbucket.bradleysmithllc.etlunit.feature.database.db.Table;
import org.bitbucket.bradleysmithllc.etlunit.feature.database.json.database._assert.assert_data.AssertDataHandler;
import org.bitbucket.bradleysmithllc.etlunit.feature.database.json.database._assert.assert_data.AssertDataRequest;
import org.bitbucket.bradleysmithllc.etlunit.feature.database.json.database._assert.assert_data_set.AssertDataSetHandler;
import org.bitbucket.bradleysmithllc.etlunit.feature.database.json.database._assert.assert_data_set.AssertDataSetRequest;
import org.bitbucket.bradleysmithllc.etlunit.feature.extend.Extender;
import org.bitbucket.bradleysmithllc.etlunit.feature.file.FileRuntimeSupport;
import org.bitbucket.bradleysmithllc.etlunit.io.file.DataFile;
import org.bitbucket.bradleysmithllc.etlunit.io.file.DataFileManager;
import org.bitbucket.bradleysmithllc.etlunit.io.file.DataFileSchema;
import org.bitbucket.bradleysmithllc.etlunit.io.file.dataset.DataSet;
import org.bitbucket.bradleysmithllc.etlunit.io.file.dataset.ReaderDataSet;
import org.bitbucket.bradleysmithllc.etlunit.io.file.dataset.ReaderDataSetContainer;
import org.bitbucket.bradleysmithllc.etlunit.io.file.dataset.ReaderDataSetUtils;
import org.bitbucket.bradleysmithllc.etlunit.listener.ClassResponder;
import org.bitbucket.bradleysmithllc.etlunit.metadata.MetaDataPackageContext;
import org.bitbucket.bradleysmithllc.etlunit.parser.*;
import javax.inject.Inject;
import javax.inject.Named;
import java.io.*;
import java.util.*;
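/**
 * Extender that handles the database feature's assert operations ("assert data"
 * and "assert data set"). Both entry points funnel into {@link #assertDataImpl},
 * which first dispatches a sibling "extract" operation to materialize the actual
 * table (or query) contents into a target data file, then defers to
 * {@link FileRuntimeSupport#processDataSetAssertion} to compare that actual data
 * against the expected data file.
 */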
public class DbAssertExtender implements Extender, AssertDataHandler, AssertDataSetHandler
{
private DataFileManager dataFileManager;
private final DatabaseFeatureModule parent;
private DatabaseRuntimeSupport databaseRuntimeSupport;
private FileRuntimeSupport fileRuntimeSupport;
private RuntimeSupport runtimeSupport;
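// Runtime option exposed to the data set assertion via refreshAssertionData();
// when enabled, the file runtime support presumably regenerates the expected
// assertion data and hands it back through processRefreshedAssertionData
// instead of failing the comparison.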
@Inject
@Named("database.refreshAssertionData")
private RuntimeOption refreshAssertionData = null;
protected Log applicationLog;
public DbAssertExtender(DatabaseFeatureModule parent)
{
this.parent = parent;
}
@Inject
public void receiveDataFileManager(DataFileManager manager)
{
dataFileManager = manager;
}
@Inject
public void receiveRuntimeSupport(RuntimeSupport manager)
{
runtimeSupport = manager;
}
@Inject
public void receiveFileRuntimeSupport(FileRuntimeSupport support)
{
fileRuntimeSupport = support;
}
@Inject
public void setApplicationLog(@Named("applicationLog") Log log)
{
applicationLog = log;
}
@Inject
public void setDatabaseRuntimeSupport(DatabaseRuntimeSupport databaseRuntimeSupport)
{
this.databaseRuntimeSupport = databaseRuntimeSupport;
}
@Override
public ClassResponder.action_code process(ETLTestMethod mt, ETLTestOperation op, ETLTestValueObject operands, VariableContext context, ExecutionContext executionContext, int executor) throws TestAssertionFailure, TestExecutionError, TestWarning
{
return action_code.defer;
}
public Feature getFeature()
{
return parent;
}
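/**
 * Internal adapter contract that lets {@link #assertDataImpl} treat the two
 * request types ({@link AssertDataRequest} and {@link AssertDataSetRequest})
 * uniformly: each caller supplies the source table/schema, assertion mode,
 * optional column list, failure id, and optionally an explicit expected data file.
 */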
interface AssertHelper
{
String getSourceTable();
String getSourceSchema();
String getTarget();
AssertDataRequest.AssertionMode getAssertionMode();
String getSqlScript();
AssertDataRequest.ColumnListMode getColumnListMode();
Set getColumnList();
String getFailureId();
boolean hasFile();
File getDataFile();
void processRefreshedAssertionData(DataFile expected) throws TestExecutionError;
}
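/**
 * Handles the "assert data" operation by wrapping the request in an
 * {@link AssertHelper} that reports no explicit data file, so the expected data
 * is resolved from the test's source data directory inside {@link #assertDataImpl}.
 */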
@Override
public action_code assertData(final AssertDataRequest request, final ETLTestMethod testMethod, final ETLTestOperation testOperation, final ETLTestValueObject valueObject, final VariableContext variableContext, ExecutionContext executionContext) throws TestAssertionFailure, TestExecutionError, TestWarning
{
return
assertDataImpl(
new AssertHelper()
{
@Override
public String getSourceTable()
{
return request.getSourceTable();
}
@Override
public String getSourceSchema()
{
return request.getSourceSchema();
}
@Override
public String getTarget()
{
return request.getTarget();
}
@Override
public AssertDataRequest.AssertionMode getAssertionMode()
{
return request.getAssertionMode();
}
@Override
public String getSqlScript()
{
return request.getSqlScript();
}
@Override
public AssertDataRequest.ColumnListMode getColumnListMode()
{
return request.getColumnListMode();
}
@Override
public Set getColumnList()
{
return request.getColumnList();
}
@Override
public String getFailureId()
{
return request.getFailureId();
}
@Override
public boolean hasFile()
{
return false;
}
@Override
public File getDataFile()
{
return null;
}
@Override
public void processRefreshedAssertionData(DataFile expected)
{
// nothing here
}
},
testMethod,
testOperation,
valueObject,
variableContext,
executionContext
);
}
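/**
 * Shared assertion flow:
 * <ol>
 *   <li>Resolve the database connection/mode and build failure ids for the
 *       source table and the target.</li>
 *   <li>Copy the operation parameters, strip the assertion-only attributes
 *       (assertion-mode, column-list-mode, column-list, failure-id), map
 *       source-reference-file-type to reference-file-type, add a target-file
 *       attribute, and dispatch a sibling "extract" operation.</li>
 *   <li>Pick up the file schema and table the extract published into the
 *       variable context, then delegate the comparison to
 *       {@link FileRuntimeSupport#processDataSetAssertion}.</li>
 * </ol>
 */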
public action_code assertDataImpl(final AssertHelper operation, final ETLTestMethod mt, final ETLTestOperation op, final ETLTestValueObject obj, final VariableContext context, ExecutionContext econtext) throws TestAssertionFailure, TestExecutionError, TestWarning
{
final String sourceTable = operation.getSourceTable();
String qualifiedName = operation.getSourceTable();
String schema = operation.getSourceSchema();
if (schema != null)
{
qualifiedName = schema + "." + sourceTable;
}
DatabaseClassListener.ConnectionMode connMode = DatabaseClassListener.getConnection(parent, obj, context, op.getTestMethod()).get(0);
final DatabaseConnection conn = connMode.conn;
final String mode = connMode.getMode();
// now send it off to the implementation
DatabaseImplementation impl = DatabaseClassListener.getDatabaseImplementation(parent, conn);
final String targetFailureId = connMode.connectionId + "." + connMode.getPrettyMode() + "." + schema + "." + runtimeSupport.processReference(operation.getTarget());
final String sourceFailureId = connMode.connectionId + "." + connMode.getPrettyMode() + "." + qualifiedName;
// dispatch this as an operation (even though this feature will ultimately process)
ETLTestValueObject copy = obj.copy();
final File target;
final String tar = operation.getTarget();
// create a target file attribute for our extract
if (tar != null)
{
target = databaseRuntimeSupport.getGeneratedData(conn, connMode.getPrettyMode(), op.getTestMethod().getQualifiedName() + "_tgt_" + tar);
}
else
{
target = databaseRuntimeSupport.getGeneratedData(conn, connMode.getPrettyMode(), sourceTable + "_" + impl.getDatabaseName(conn, mode) + "_" + op.getTestMethod().getName());
}
// remove the assertion-mode attribute since it does not pass validation
// for the extract operation. Also remove the column-list-mode and column-list
// attributes - i.e., do a full extract, so that the data set comparator has a
// complete data set to work with.
ETLTestValueObjectBuilder cbuilder = new ETLTestValueObjectBuilder(copy);
if (copy.query("assertion-mode") != null)
{
cbuilder = cbuilder.removeKey("assertion-mode");
}
if (copy.query("column-list-mode") != null)
{
cbuilder = cbuilder.removeKey("column-list-mode");
}
if (copy.query("column-list") != null)
{
cbuilder = cbuilder.removeKey("column-list");
}
if (copy.query("failure-id") != null)
{
cbuilder = cbuilder.removeKey("failure-id");
}
// if a source-reference-file-type is supplied, pass it on to the extract operation as reference-file-type
ETLTestValueObject trftquery = copy.query("source-reference-file-type");
if (trftquery != null && !trftquery.isNull())
{
cbuilder.key("reference-file-type").value(trftquery);
cbuilder.removeKey("source-reference-file-type");
}
if (copy.query("target-reference-file-type") != null)
{
cbuilder = cbuilder.removeKey("target-reference-file-type");
}
cbuilder = cbuilder.key("target-file")
.value(target.getAbsolutePath());
copy = cbuilder.toObject();
ETLTestOperation extract_op = op.createSibling("extract", copy);
econtext.process(extract_op, context);
final DataFileSchema extractFileSchema;
// grab the schema the extract produced
if (context.hasVariableBeenDeclared(DatabaseClassListener.LAST_EXTRACT_FILE_SCHEMA))
{
extractFileSchema = (DataFileSchema) context.getValue(DatabaseClassListener.LAST_EXTRACT_FILE_SCHEMA).getValueAsPojo();
}
else
{
extractFileSchema = null;
}
final Table table;
// grab the schema the extract produced
if (context.hasVariableBeenDeclared(DatabaseClassListener.LAST_EXTRACT_TABLE))
{
table = (Table) context.getValue(DatabaseClassListener.LAST_EXTRACT_TABLE).getValueAsPojo();
}
else
{
table = null;
}
// defer to the file runtime support implementation
fileRuntimeSupport.processDataSetAssertion(
new FileRuntimeSupport.DataSetAssertionRequest()
{
@Override
public void processRefreshedAssertionData(DataFile expected) throws TestExecutionError
{
operation.processRefreshedAssertionData(expected);
}
@Override
public assertMode getAssertMode()
{
AssertDataRequest.AssertionMode assertionMode = operation.getAssertionMode();
return assertionMode != null ? FileRuntimeSupport.DataSetAssertionRequest.assertMode.valueOf(assertionMode.name().toLowerCase()) : assertMode.equals;
}
@Override
public String getActualSchemaName()
{
return ObjectUtils.firstNonNull(operation.getSourceTable(), operation.getSqlScript());
}
@Override
public String getActualBackupSchemaName()
{
return databaseRuntimeSupport.createSchemaKey(table);
}
@Override
public String getExpectedSchemaName()
{
return tar;
}
@Override
public String getExpectedBackupSchemaName()
{
return getActualBackupSchemaName();
}
@Override
public boolean hasColumnList()
{
return operation.getColumnListMode() != null;
}
@Override
public columnListMode getColumnListMode()
{
AssertDataRequest.ColumnListMode columnListMode2 = operation.getColumnListMode();
return columnListMode2 != null ? FileRuntimeSupport.DataSetAssertionRequest.columnListMode.valueOf(columnListMode2.name().toLowerCase()) : null;
}
@Override
public Set getColumnList()
{
return operation.getColumnList();
}
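// For empty/not_empty assertions the failure id defaults to the source table's id;
// for an equals assertion it defaults to the target's id.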
@Override
public String getFailureId()
{
String failureId1 = operation.getFailureId();
if (
getAssertMode() == assertMode.empty
||
getAssertMode() == assertMode.not_empty
)
{
return ObjectUtils.firstNonNull(failureId1, sourceFailureId);
}
else if (
getAssertMode() == assertMode.equals
)
{
return ObjectUtils.firstNonNull(failureId1, targetFailureId);
}
return failureId1;
}
@Override
public DataFile getExpectedDataset(
DataFileSchema explicitActualSchema,
DataFileSchema actualEffectiveSchema,
DataFileSchema explicitExpectedSchema,
DataFileSchema expectedEffectiveSchema
)
{
File dfile;
if (operation.hasFile())
{
dfile = operation.getDataFile();
}
else
{
dfile = databaseRuntimeSupport.getSourceDataForCurrentTest(tar, "assertion-target", explicitExpectedSchema.getFormatType());
}
return dataFileManager.loadDataFile(dfile, explicitExpectedSchema);
}
@Override
public DataFile getActualDataset(
DataFileSchema explicitActualSchema,
DataFileSchema actualEffectiveSchema,
DataFileSchema explicitExpectedSchema,
DataFileSchema expectedEffectiveSchema
)
{
return dataFileManager.loadDataFile(
target,
explicitActualSchema
);
}
@Override
public ETLTestMethod getTestMethod()
{
return mt;
}
@Override
public ETLTestOperation getTestOperation()
{
return op;
}
@Override
public ETLTestValueObject getOperationParameters()
{
return obj;
}
@Override
public VariableContext getVariableContext()
{
return context;
}
@Override
public MetaDataPackageContext getMetaDataPackageContext()
{
return null;
}
@Override
public boolean refreshAssertionData()
{
return refreshAssertionData.isEnabled();
}
@Override
public DataFileSchema getSourceSchema()
{
return extractFileSchema;
}
}
);
return ClassResponder.action_code.handled;
}
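/**
 * Handles the "assert data set" operation. Most of the original data-set
 * extraction logic is commented out below; the current implementation points
 * the shared {@link AssertHelper} at the caller-supplied target file and pulls
 * the remaining attributes (source-table, source-schema, sql-script, failure-id)
 * straight out of the operation parameters.
 */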
@Override
public action_code assertDataSet(final AssertDataSetRequest request, ETLTestMethod testMethod, final ETLTestOperation testOperation, ETLTestValueObject valueObject, final VariableContext variableContext, ExecutionContext executionContext) throws TestAssertionFailure, TestExecutionError, TestWarning
{
// open the data set and verify it exists
final File target = new File(request.getTargetFilePath());
// extract off the target-file-path part
final ETLTestValueObject copyObj = new ETLTestValueObjectBuilder(valueObject.copy()).removeKey("target-file-path").toObject();
/*
final File tfile = databaseRuntimeSupport.getSourceDataSetForCurrentTest(request.getDataSetName(), "assertion-target");
*/
/*
// extract the data set into a data file for later processing
final File dfile = runtimeSupport.createAnonymousTempFile();
final String dataSetId = request.getDataSetId();
final DataSet ds;
// if refresh option is on, this is not a failure
if (!tfile.exists() && !refreshAssertionData.isEnabled()) {
throw new TestExecutionError("Data set [" + tfile.getName() + "] not found", DatabaseConstants.ERR_MISSING_DATA_SET);
}
*/
/*
// again, only bother extracting the file if we are not refreshing assertion data
if (!refreshAssertionData.isEnabled()) {
FileReader fileReader = new FileReader(tfile);
try {
ReaderDataSetContainer rdsc = new ReaderDataSetContainer(dataFileManager, fileReader);
if (dataSetId != null) {
try {
ds = rdsc.locate(dataSetId);
} catch (IllegalArgumentException exc) {
throw new TestExecutionError("Could not locate data set with id {" + dataSetId + "}", DatabaseConstants.ERR_MISSING_DATA_SET_ID, exc);
}
} else {
// grab the first one
if (rdsc.hasNext()) {
ds = rdsc.next();
} else {
throw new TestExecutionError("Data set is empty", DatabaseConstants.ERR_MISSING_DATA_SET);
}
}
// copy into temp file
Reader reader = ds.read();
try {
Writer writer = new BufferedWriter(new FileWriter(dfile));
try {
IOUtils.copy(reader, writer);
} finally {
writer.close();
}
} finally {
reader.close();
}
} finally {
fileReader.close();
}
}
else
{
ds = null;
}
// make a copy of the request before modifying
ETLTestValueObject copyObj = valueObject.copy();
// override the properties with what are in the data set, removing what should not be seen downstream
Boolean ignoreDataSetProperties = request.getIgnoreDataSetProperties();
if (ds != null && (ignoreDataSetProperties == null || !ignoreDataSetProperties.booleanValue())) {
// directed to use the data set properties
copyObj = copyObj.merge(
ETLTestParser.loadObject(ds.getProperties().toString()),
ETLTestValueObject.merge_type.left_merge,
ETLTestValueObject.merge_policy.recursive
);
}
final ETLTestValueObject copy = copyObj;
ETLTestValueObjectBuilder etlTestValueObjectBuilder = new ETLTestValueObjectBuilder(copy);
// remove keys which only pertain to the data set
etlTestValueObjectBuilder.removeKey("data-set-name");
if (etlTestValueObjectBuilder.hasKey("data-set-id")) {
etlTestValueObjectBuilder.removeKey("data-set-id");
}
if (etlTestValueObjectBuilder.hasKey("id")) {
etlTestValueObjectBuilder.removeKey("id");
}
final ETLTestValueObject newValueObj = etlTestValueObjectBuilder.toObject();
*/
return
assertDataImpl(
new AssertHelper()
{
@Override
public String getSourceTable()
{
return checkAttribute(copyObj, "source-table");
}
@Override
public String getSourceSchema()
{
return checkAttribute(copyObj, "source-schema");
}
@Override
public String getTarget()
{
return request.getTarget();
}
@Override
public AssertDataRequest.AssertionMode getAssertionMode()
{
AssertDataSetRequest.AssertionMode assertionMode = request.getAssertionMode();
return assertionMode == null ? null : AssertDataRequest.AssertionMode.valueOf(assertionMode.name());
}
@Override
public String getSqlScript()
{
return checkAttribute(copyObj, "sql-script");
}
@Override
public AssertDataRequest.ColumnListMode getColumnListMode()
{
AssertDataSetRequest.ColumnListMode columnListMode = request.getColumnListMode();
return columnListMode == null ? null : AssertDataRequest.ColumnListMode.valueOf(columnListMode.name());
}
@Override
public Set getColumnList()
{
return request.getColumnList();
}
@Override
public String getFailureId()
{
return checkAttribute(copyObj, "failure-id");
}
@Override
public boolean hasFile()
{
return true;
}
@Override
public File getDataFile()
{
return target;
}
@Override
public void processRefreshedAssertionData(DataFile expected) throws TestExecutionError
{
// rename the source data set to something else temporary
// 'target' is our new data file we must overlay on top
// of the project data file
AssertFeatureModule.signalRefreshAssertionData(testOperation, variableContext);
// special case if the file does not exist yet
/*
if (!tfile.exists())
{
// in this case we propagate the properties from the assert operation, minus the data-set-name and the data-set-id
try
{
Writer fwriter = new BufferedWriter(new FileWriter(tfile));
try
{
FileReader body = new FileReader(dfile);
try
{
// don't use all the inferred stuff here, but remove the data-set-name and id
ETLTestValueObject operands = testOperation.getOperands().copy();
ETLTestValueObjectBuilder builder = new ETLTestValueObjectBuilder(operands);
builder.removeKey("data-set-name");
if (builder.hasKey("data-set-id"))
{
builder.key("id").value(operands.query("data-set-id").getValueAsString());
builder.removeKey("data-set-id");
}
ReaderDataSetUtils.write(operands.getJsonNode(), body, fwriter);
}
finally
{
body.close();
}
}
finally
{
fwriter.close();
}
}
catch(IOException exc)
{
throw new TestExecutionError("", DatabaseConstants.ERR_IO_ERR, exc);
}
}
// copy the source data set, every set except the one we are updating
// in the case of not id present, replace the first one and just migrate the rest
try {
// open the target and write the first data set (preserving properties), then pipe the rest in.
FileReader fileReader1 = new FileReader(tfile);
Writer fwriter = new BufferedWriter(new FileWriter(temp));
try {
ReaderDataSetContainer rdscSource = new ReaderDataSetContainer(dataFileManager, fileReader1);
ReaderDataSetUtils.copy(rdscSource, fwriter, new ReaderDataSetUtils.CopyVisitor() {
@Override
public JsonNode getProperties(DataSet sourceDataSet, int index) {
return null;
}
@Override
public Reader read(DataSet set, int index) throws IOException {
boolean update = dataSetId == null;
if (dataSetId != null) {
// check the properties for a match on id number
JsonNode jid = set.getProperties().get("id");
if (jid != null && jid.getNodeType() == JsonNodeType.STRING) {
update = true;
}
}
if (update) {
// replace with new data contents
return new FileReader(dfile);
}
return null;
}
@Override
public void close(Reader reader, DataSet set, int dsIndex) throws IOException {
reader.close();
}
@Override
public boolean includeDataSet(DataSet dataSet, int index) {
// copy everything
return true;
}
});
} finally {
try {
fileReader1.close();
} finally {
fwriter.close();
}
}
// swap files
FileUtils.copyFile(temp, tfile);
} catch (IOException exc) {
throw new TestExecutionError("Error copying stage data set", DatabaseConstants.ERR_IO_ERR, exc);
}
*/
}
},
testMethod,
testOperation,
copyObj,
variableContext,
executionContext
);
}
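/**
 * Returns the string value of the named attribute from the operation
 * parameters, or null when the attribute is absent.
 */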
private String checkAttribute(ETLTestValueObject newValueObj, String s)
{
ETLTestValueObject query = newValueObj.query(s);
return query != null ? query.getValueAsString() : null;
}
}