org.etlunit.cli.MigrateCmd
package org.etlunit.cli;
import org.clamshellcli.api.Command;
import org.clamshellcli.api.Configurator;
import org.clamshellcli.api.Context;
import org.clamshellcli.api.IOConsole;
import org.etlunit.Configuration;
import org.etlunit.ETLTestVM;
import org.etlunit.RuntimeSupport;
import org.etlunit.feature.AbstractFeature;
import org.etlunit.feature.file.FileRuntimeSupport;
import org.etlunit.io.file.DataFile;
import org.etlunit.io.file.DataFileManager;
import org.etlunit.io.file.DataFileSchema;
import org.etlunit.maven.ETLUnitMojo;
import javax.inject.Inject;
import java.io.File;
import java.io.FileFilter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
public class MigrateCmd implements Command
{
private static final String NAMESPACE = "syscmd";
public static final String ACTION_NAME = "migrate";
private RuntimeSupport runtimeSupport;
private FileRuntimeSupport fileRuntimeSupport;
private DataFileManager dataFileManager;
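// support objects populated by injection when the throwaway feature below is installed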
public Object execute(Context ctx)
{
IOConsole console = ctx.getIoConsole();
try
{
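// load the etlunit project configuration from the current working directory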
File basedir = new File(".");
Configuration config = ETLUnitMojo.loadConfiguration(basedir, "migrate", "1.0a1", null);
ETLTestVM vm = new ETLTestVM(config);
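// register a minimal feature whose only purpose is to have the runtime, file and
// data-file support objects injected; the tests themselves are never executed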
vm.addFeature(new AbstractFeature()
{
@Override
public List<String> getPrerequisites()
{
return Arrays.asList("file");
}
@Inject
public void receiveRuntimeSupport(RuntimeSupport rs)
{
runtimeSupport = rs;
}
@Inject
public void receiveFileRuntimeSupport(FileRuntimeSupport rs)
{
fileRuntimeSupport = rs;
}
@Inject
public void receiveDataFileManager(DataFileManager dfm)
{
dataFileManager = dfm;
}
@Override
public String getFeatureName()
{
return "migrator";
}
});
vm.installFeatures();
// don't actually run the tests; we only need the injected support objects
// loop through all packages and prompt to migrate a data set
for (String pack : runtimeSupport.getTestPackages())
{
console.writeOutput("Processing package: " + pack + Configurator.VALUE_LINE_SEP);
List<File> fileList = new ArrayList<File>();
File files = new File(runtimeSupport.getTestSourceDirectory(pack), "files");
scan(files, fileList);
File data = new File(runtimeSupport.getTestSourceDirectory(pack), "data");
scan(data, fileList);
int num = -1;
File toMig = null;
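// prompt until the user picks a valid index from the list printed by scan()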
while (fileList.size() != 0)
{
String read = console.readInput("Select a file to migrate: ");
try
{
num = Integer.parseInt(read);
if (num < 0 || num >= fileList.size())
{
console.writeOutput("Please enter a number between [0] and [" + (fileList.size() - 1) + "]" + Configurator.VALUE_LINE_SEP);
}
else
{
toMig = fileList.get(num);
break;
}
}
catch(NumberFormatException exc)
{
console.writeOutput(exc.toString() + Configurator.VALUE_LINE_SEP);
}
}
console.writeOutput("Migrating " + toMig.getName() + " . . . " + Configurator.VALUE_LINE_SEP);
List<String> idList = new ArrayList<String>();
console.writeOutput("Select the source and destination schemas" + Configurator.VALUE_LINE_SEP);
for (String id : fileRuntimeSupport.getReferenceFileSchemasForPackage(null))
{
console.writeOutput("[" + idList.size() + "] - " + id + Configurator.VALUE_LINE_SEP);
idList.add(id);
}
String srcSchema = null;
String destSchema = null;
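// prompt until the user picks a valid source schema index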
while (idList.size() != 0)
{
String read = console.readInput("Select the source schema: ");
try
{
num = Integer.parseInt(read);
if (num < 0 || num >= idList.size())
{
console.writeOutput("Please enter a number between [0] and [" + (idList.size() - 1) + "]" + Configurator.VALUE_LINE_SEP);
}
else
{
srcSchema = idList.get(num);
break;
}
}
catch(NumberFormatException exc)
{
console.writeOutput(exc.toString() + Configurator.VALUE_LINE_SEP);
}
}
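// prompt until the user picks a valid destination schema index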
while (idList.size() != 0)
{
String read = console.readInput("Select the destination schema: ");
try
{
num = Integer.parseInt(read);
if (num < 0 || num >= idList.size())
{
console.writeOutput("Please enter a number between [0] and [" + (idList.size() - 1) + "]" + Configurator.VALUE_LINE_SEP);
}
else
{
destSchema = idList.get(num);
break;
}
}
catch(NumberFormatException exc)
{
console.writeOutput(exc.toString() + Configurator.VALUE_LINE_SEP);
}
}
console.writeOutput("Migrating " + toMig.getName() + " using " + srcSchema + " as the source schema, and " + destSchema + " as the destination schema . . . " + Configurator.VALUE_LINE_SEP);
File srcfmlFile = fileRuntimeSupport.getReferenceFileSchema(srcSchema);
File dstfmlFile = fileRuntimeSupport.getReferenceFileSchema(destSchema);
DataFileSchema srcfml = dataFileManager.loadDataFileSchema(srcfmlFile);
DataFileSchema dstfml = dataFileManager.loadDataFileSchema(dstfmlFile);
DataFile srcData = dataFileManager.loadDataFile(toMig, srcfml);
File toMigBak = new File(toMig.getParentFile(), toMig.getName() + ".bak");
File toMigTarg = new File(toMig.getParentFile(), toMig.getName() + ".tmp");
DataFile destData = dataFileManager.loadDataFile(toMigTarg, dstfml);
dataFileManager.copyDataFile(srcData, destData);
// copy target over the source
if (!toMig.renameTo(toMigBak) || !toMigTarg.renameTo(toMig))
{
console.writeOutput("Unable to replace " + toMig.getName() + " with the migrated file" + Configurator.VALUE_LINE_SEP);
}
}
}
catch (Exception exc)
{
exc.printStackTrace();
console.writeOutput(exc.toString() + Configurator.VALUE_LINE_SEP);
}
return null;
}
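// Lists the entries of a directory (ignoring .svn), printing an index for each one and
// collecting it into fileList; the FileFilter is used only for its side effects and always
// returns false. The printed indices are the ones the user is asked for in execute().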
private void scan(File files, final List<File> fileList)
{
System.out.println(files.getAbsolutePath());
files.listFiles(new FileFilter()
{
@Override
public boolean accept(File pathname)
{
if (!pathname.getName().equals(".svn"))
{
System.out.println("[" + fileList.size() + "] - " + pathname.getName());
fileList.add(pathname);
}
return false;
}
});
}
public void plug(Context plug)
{
// no load-time setup needed
}
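// describes the command to the shell: namespace, name, help text, usage and arguments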
public Descriptor getDescriptor()
{
return new Descriptor()
{
public String getNamespace()
{
return NAMESPACE;
}
public String getName()
{
return ACTION_NAME;
}
public String getDescription()
{
return "Migrates data files from one fml to another";
}
public String getUsage()
{
return "migrate";
}
public Map getArguments()
{
return Collections.emptyMap();
}
};
}
}