// org.bitbucket.bradleysmithllc.etlunit.cli.MigrateCmd (Maven / Gradle / Ivy)
package org.bitbucket.bradleysmithllc.etlunit.cli;
import org.apache.commons.io.FilenameUtils;
import org.apache.commons.lang3.StringUtils;
import org.bitbucket.bradleysmithllc.etlunit.cli.regexp.DelimitedFileNameExpression;
import org.bitbucket.bradleysmithllc.etlunit.feature.file.FileRuntimeSupport;
import org.bitbucket.bradleysmithllc.etlunit.parser.ETLTestPackage;
import org.bitbucket.bradleysmithllc.etlunit.util.IOUtils;
import org.clamshellcli.api.Command;
import org.clamshellcli.api.Configurator;
import org.clamshellcli.api.Context;
import org.clamshellcli.api.IOConsole;
import org.bitbucket.bradleysmithllc.etlunit.Configuration;
import org.bitbucket.bradleysmithllc.etlunit.ETLTestVM;
import org.bitbucket.bradleysmithllc.etlunit.RuntimeSupport;
import org.bitbucket.bradleysmithllc.etlunit.feature.AbstractFeature;
import org.bitbucket.bradleysmithllc.etlunit.io.file.DataFile;
import org.bitbucket.bradleysmithllc.etlunit.io.file.DataFileManager;
import org.bitbucket.bradleysmithllc.etlunit.io.file.DataFileSchema;
import org.bitbucket.bradleysmithllc.etlunit.maven.ETLUnitMojo;
import javax.inject.Inject;
import java.io.File;
import java.io.FileFilter;
import java.util.*;
import java.util.List;
import java.util.regex.Pattern;
/**
 * Interactive shell command ({@code migrate}) that converts delimited data files in an
 * etlunit test project from one file schema (fml) to another.
 * <p>
 * For each test package it scans the {@code files} and {@code data} source directories for
 * candidate data sets (optionally filtered by a name regex passed as the single command-line
 * argument), prompts the user to pick the file(s) plus a source and destination schema, then
 * copies each data set through {@link DataFileManager#copyDataFile}, keeping the original as
 * a {@code .bak} file next to the migrated result.
 */
public class MigrateCmd implements Command {
    /** Identifies which source sub-directory a scan pass is examining ({@code files} vs {@code data}). */
    enum scan_type {files, data}

    private static final String NAMESPACE = "syscmd";
    public static final String ACTION_NAME = "migrate";

    // Support objects captured via @Inject when the temporary feature registered in
    // execute() is installed into the ETLTestVM.
    private RuntimeSupport runtimeSupport;
    private FileRuntimeSupport fileRuntimeSupport;
    private DataFileManager dataFileManager;

    /**
     * Runs the interactive migration. Always returns {@code null} (no command result);
     * errors are reported to the console rather than propagated.
     *
     * @param ctx shell context; may carry a single command-line argument interpreted as a
     *            case-insensitive regex restricting which data sets are offered
     */
    public Object execute(Context ctx) {
        IOConsole console = ctx.getIoConsole();

        String[] args = (String[]) ctx.getValue(Context.KEY_COMMAND_LINE_ARGS);

        // Optional single argument: regex restricting which data sets are offered.
        String dataSetNamePattern = null;
        if (args != null && args.length == 1) {
            dataSetNamePattern = args[0];
        }

        try {
            File basedir = new File(".");
            Configuration config = ETLUnitMojo.loadConfiguration(basedir, "migrate", "1.0a1", null);

            ETLTestVM vm = new ETLTestVM(config);

            // Register a throw-away feature whose only purpose is to receive the injected
            // support objects; the tests themselves are never run.
            vm.addFeature(new AbstractFeature() {
                @Override
                public List<String> getPrerequisites() {
                    return Arrays.asList("file");
                }

                @Inject
                public void receiveRuntimeSupport(RuntimeSupport rs) {
                    runtimeSupport = rs;
                }

                @Inject
                public void receiveFileRuntimeSupport(FileRuntimeSupport rs) {
                    fileRuntimeSupport = rs;
                }

                @Inject
                public void receiveDataFileManager(DataFileManager dfm) {
                    dataFileManager = dfm;
                }

                @Override
                public String getFeatureName() {
                    return "migrator";
                }
            });

            vm.installFeatures();
            // don't actually run the tests, I just wanted the support objects

            // loop through all packages and prompt to migrate a data set
            for (ETLTestPackage pack : runtimeSupport.getTestPackages()) {
                console.writeOutput("Processing package: " + pack + Configurator.VALUE_LINE_SEP);

                List<File> fileList = new ArrayList<File>();

                File files = new File(runtimeSupport.getTestSourceDirectory(pack), "files");
                scan(files, fileList, scan_type.files, dataSetNamePattern);

                File data = new File(runtimeSupport.getTestSourceDirectory(pack), "data");
                scan(data, fileList, scan_type.data, dataSetNamePattern);

                List<File> filesToMigrate;

                if (fileList.isEmpty()) {
                    // nothing matched in this package - move on to the next one
                    continue;
                } else if (fileList.size() == 1) {
                    // only one candidate; no need to prompt
                    filesToMigrate = new ArrayList<File>(fileList);
                } else {
                    filesToMigrate = selectAnItem(fileList, "Select a file to migrate: ", console, true);
                }

                if (filesToMigrate == null) {
                    // user cancelled the selection
                    return null;
                }

                List<File> referenceFileSchemaFilesForPackage = fileRuntimeSupport.getReferenceFileSchemaFilesForPackage(null);

                List<File> schemaSource = selectAnItem(referenceFileSchemaFilesForPackage, "Select the source schema: ", console, false);
                if (schemaSource == null) {
                    return null;
                }
                File sourceSchema = schemaSource.get(0);

                schemaSource = selectAnItem(referenceFileSchemaFilesForPackage, "Select the destination schema: ", console, false);
                if (schemaSource == null) {
                    return null;
                }
                File destSchema = schemaSource.get(0);

                for (File dataSet : filesToMigrate) {
                    console.writeOutput("Migrating " + dataSet.getName() + " using " + sourceSchema.getName() + " as the source schema, and " + destSchema.getName() + " as the destination schema . . . ");

                    try {
                        // schema id is the file name minus its extension
                        DataFileSchema srcfml = dataFileManager.loadDataFileSchema(sourceSchema, FilenameUtils.removeExtension(sourceSchema.getName()));
                        DataFileSchema dstfml = dataFileManager.loadDataFileSchema(destSchema, FilenameUtils.removeExtension(destSchema.getName()));

                        DataFile srcData = dataFileManager.loadDataFile(dataSet, srcfml);

                        File toMigBak = new File(dataSet.getParentFile(), dataSet.getName() + ".bak");
                        File toMigTarg = new File(dataSet.getParentFile(), dataSet.getName() + ".tmp");

                        // clear any leftovers from a previous run; best-effort, the files may not exist
                        toMigBak.delete();
                        toMigTarg.delete();

                        DataFile destData = dataFileManager.loadDataFile(toMigTarg, dstfml);
                        dataFileManager.copyDataFile(srcData, destData);

                        // swap the migrated copy into place, keeping the original as a .bak;
                        // check each rename so a failure cannot silently lose the data set
                        if (!dataSet.renameTo(toMigBak)) {
                            console.writeOutput("Could not back up " + dataSet.getName() + " - leaving original in place." + Configurator.VALUE_LINE_SEP);
                        } else if (!toMigTarg.renameTo(dataSet)) {
                            // try to restore the original so no data is lost
                            toMigBak.renameTo(dataSet);
                            console.writeOutput("Could not move migrated copy into place for " + dataSet.getName() + "; original restored." + Configurator.VALUE_LINE_SEP);
                        } else {
                            console.writeOutput("Done." + Configurator.VALUE_LINE_SEP);
                        }
                    } catch (Exception exc) {
                        // report per-file failures and continue with the remaining data sets
                        console.writeOutput(exc.toString() + Configurator.VALUE_LINE_SEP);
                    }
                }

                console.writeOutput("Migration done." + Configurator.VALUE_LINE_SEP);
            }
        } catch (Exception exc) {
            exc.printStackTrace();
            console.writeOutput(exc.toString() + Configurator.VALUE_LINE_SEP);
        }

        return null;
    }

    /**
     * Presents a numbered menu of files and reads selections from the console.
     *
     * @param fileList        options to choose from
     * @param s               prompt header written before the menu
     * @param console         console used for I/O
     * @param multipleAllowed when true the user may pick several items and finish with 'd';
     *                        when false the first valid pick completes the selection
     * @return the selected files, or {@code null} if the user cancelled ('x') or
     *         pressed 'd' without selecting anything
     */
    private List<File> selectAnItem(List<File> fileList, String s, IOConsole console, boolean multipleAllowed) {
        List<File> selected = new ArrayList<File>();

        boolean done = false;

        int num = 0;

        console.writeOutput(s + Configurator.VALUE_LINE_SEP);

        for (File option : fileList) {
            console.writeOutput("[" + ++num + "] - " + IOUtils.removeExtension(option) + Configurator.VALUE_LINE_SEP);
        }

        console.writeOutput("[x] - Cancel" + Configurator.VALUE_LINE_SEP);
        console.writeOutput("[d] - Done" + Configurator.VALUE_LINE_SEP);

        // keep prompting until the user is done or every option has been selected
        while (!done && selected.size() < fileList.size()) {
            String opt = console.readInput("Please select an option: ");

            if (opt.equals("x")) {
                return null;
            }

            if (opt.equals("d")) {
                done = true;

                if (selected.isEmpty()) {
                    // 'done' with nothing chosen is treated as a cancel
                    return null;
                }
            } else {
                try {
                    int select = Integer.parseInt(opt);

                    if (select <= 0 || select > fileList.size()) {
                        console.writeOutput("Numeric index out of range [1, " + fileList.size() + "]: " + select + Configurator.VALUE_LINE_SEP);
                    } else {
                        File e = fileList.get(select - 1);

                        if (selected.contains(e)) {
                            console.writeOutput("Item [" + e.getName() + "] already selected." + Configurator.VALUE_LINE_SEP);
                        } else {
                            selected.add(e);

                            if (!multipleAllowed) {
                                // single-select mode: first valid pick finishes
                                done = true;
                            }
                        }
                    }
                } catch (NumberFormatException e) {
                    console.writeOutput("Bad option - not a number, 'x' or 'd': " + opt + Configurator.VALUE_LINE_SEP);
                }
            }
        }

        return selected;
    }

    /**
     * Collects candidate data-set files from a directory into {@code fileList}.
     * <p>
     * For {@link scan_type#files} every entry except {@code .svn} is a candidate; for
     * {@link scan_type#data} only names matching {@link DelimitedFileNameExpression} are.
     * When {@code dataSetNamePattern} is non-null it further restricts candidates by a
     * case-insensitive {@code find()} match on the file name. The {@link FileFilter} always
     * returns false - it is used purely for its side effect of populating {@code fileList};
     * if the directory does not exist, listFiles() returns null and nothing is collected.
     */
    private void scan(File files, final List<File> fileList, final scan_type data, String dataSetNamePattern) {
        final Pattern namePattern;

        if (dataSetNamePattern != null) {
            namePattern = Pattern.compile(dataSetNamePattern, Pattern.CASE_INSENSITIVE);
        } else {
            namePattern = null;
        }

        files.listFiles(new FileFilter() {
            @Override
            public boolean accept(File pathname) {
                switch (data) {
                    case files:
                        if (!pathname.getName().equals(".svn")) {
                            if (namePattern != null) {
                                if (namePattern.matcher(pathname.getName()).find()) {
                                    fileList.add(pathname);
                                }
                            } else {
                                fileList.add(pathname);
                            }
                        }
                        break;
                    case data:
                        DelimitedFileNameExpression dfne = new DelimitedFileNameExpression(pathname.getName());

                        if (dfne.matches()) {
                            if (namePattern != null) {
                                if (namePattern.matcher(pathname.getName()).find()) {
                                    fileList.add(pathname);
                                }
                            } else {
                                fileList.add(pathname);
                            }
                        }
                        break;
                }

                // never accept - collection happens via the side effect above
                return false;
            }
        });
    }

    public void plug(Context plug) {
        // no load-time setup needed
    }

    /** Describes the command to the shell: namespace, name, usage and arguments. */
    public Descriptor getDescriptor() {
        return new Descriptor() {
            public String getNamespace() {
                return NAMESPACE;
            }

            public String getName() {
                return ACTION_NAME;
            }

            public String getDescription() {
                return "Migrates data files from one fml to another";
            }

            public String getUsage() {
                // the optional argument is the data-set name regex documented in getArguments()
                return "migrate <namePattern>";
            }

            public Map<String, String> getArguments() {
                HashMap<String, String> map = new HashMap<String, String>();
                map.put("namePattern", "Regular expression of the data set(s) to match.");
                return map;
            }
        };
    }
}
// © 2015 - 2025 Weber Informatics LLC | Privacy Policy