
/*-
* Copyright (C) 2011, 2018 Oracle and/or its affiliates. All rights reserved.
*
* This file was distributed by Oracle as part of a version of Oracle NoSQL
* Database made available at:
*
* http://www.oracle.com/technetwork/database/database-technologies/nosqldb/downloads/index.html
*
* Please see the LICENSE file included in the top-level directory of the
* appropriate version of Oracle NoSQL Database for a copy of the license and
* additional information.
*/
package oracle.kv.util.migrator;
import static oracle.kv.util.migrator.impl.util.MigratorUtils.checkFileExist;
import static oracle.kv.util.migrator.impl.util.MigratorUtils.compareFilePaths;
import static oracle.kv.util.migrator.impl.util.MigratorUtils.formatName;
import static oracle.kv.util.migrator.impl.util.MigratorUtils.formatTimestamp;
import static oracle.kv.util.migrator.impl.util.MigratorUtils.mbyteToByte;
import static oracle.kv.util.migrator.impl.util.MigratorUtils.toFilePath;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.nio.channels.FileChannel;
import java.nio.channels.FileLock;
import java.nio.channels.OverlappingFileLockException;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.atomic.AtomicLong;
import java.util.logging.Level;
import java.util.logging.Logger;
import oracle.kv.impl.test.TestHook;
import oracle.kv.util.migrator.impl.util.LimitedFile;
import oracle.kv.util.migrator.impl.util.LimitedFile.FileCountLimitException;
/**
* The migrator state handler encapsulates two handlers:
*
* 1. CheckpointHandler
* Manages the migration completion state at DataSource granularity.
*
* The checkpointOutput is the directory that holds all checkpoint files. If
* it is specified, existing checkpoint files are loaded and their
* corresponding DataSources are marked as completed and skipped during
* this run.
* Once migration of a DataSource completes, a new checkpoint file is
* created with completion information. A checkpoint file maps to a specific
* DataSource by name: the checkpoint file name is derived from the
* DataSource name by replacing the special characters '\', '.' and ':'
* with '-' (for example, a source named "my.source" maps to the checkpoint
* file "my-source.ckp").
*
* 2. ErrorOutputHandler
* Logs errors or warnings.
*
* The errorOutput is the directory that holds all error files. If it is
* specified, errors and warnings are written to an error file. Each error
* file corresponds to a specific DataSource and is a {@link LimitedFile},
* which enforces a size limit on each single file and a limit on the number
* of files that can be used for one error output. The default size limit is
* {@code DEF_FILE_SIZE_LIMIT_MB} MB and the default file count is
* {@code DEF_FILE_NUM_LIMIT}.
*
* For both the checkpointOutput and errorOutput directories:
* o If the directory exists, it must be readable and writable.
* o If it does not exist, it is created.
* o A lock file named "wr.lck" is created in the directory and used to
* acquire an exclusive lock on it, so that another migrator process cannot
* write to the same directory concurrently.
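*
* <p>Illustrative usage sketch only: the directory paths, record count, and
* the {@code logger}, {@code source} and {@code sink} variables below are
* hypothetical placeholders supplied by the caller.
* <pre>
* StateHandler handler = new StateHandler(
*     false,                        // abortOnError
*     "/tmp/migrator/errors",       // errorOutput directory
*     0,                            // errFileLimitMB, 0 = default (10 MB)
*     0,                            // errFileCount, 0 = default (5 files)
*     "/tmp/migrator/checkpoints",  // checkpointOutput directory
*     logger);
* try {
*     if (!handler.isLoaded(source)) {
*         // ... migrate the source, counting migrated records ...
*         handler.setLoaded(source, sink, System.currentTimeMillis(),
*                           numRecords);
*     }
* } finally {
*     handler.close();
* }
* </pre>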
*/
public class StateHandler {
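/* Default size limit, in MB, of a single error output file */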
private static final int DEF_FILE_SIZE_LIMIT_MB = 10;
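/* Default maximum number of files used for one error output */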
private static final int DEF_FILE_NUM_LIMIT = 5;
private final CheckpointHandler checkpointHdl;
private final ErrorOutputHandler errorHdl;
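/* Exclusive locks held on the checkpoint and error output directories */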
private final Map<File, DirectoryLocker> dirLockers;
private final Logger logger;
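/* Optional test hook invoked with the error cause before an error is handled */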
public static TestHook<RuntimeException> errorCheckHook;
StateHandler(boolean abortOnError,
String errorOutput,
int errFileLimitMB,
int errFileCount,
String checkpointOutput,
Logger logger) {
dirLockers = new TreeMap<File, DirectoryLocker>(new Comparator<File>() {
@Override
public int compare(File f1, File f2) {
return compareFilePaths(f1, f2);
}
});
this.logger = logger;
errorHdl = new ErrorOutputHandler(abortOnError,
errorOutput,
errFileLimitMB,
errFileCount);
checkpointHdl = new CheckpointHandler(checkpointOutput);
}
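/** Acquires an exclusive lock on the given output directory if one is not already held. */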
public void lockDirectory(File dir) {
if (!dirLockers.containsKey(dir)) {
dirLockers.put(dir, new DirectoryLocker(dir));
}
}
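/** Returns true if migration of the given data source has already completed. */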
public boolean isLoaded(DataSource source) {
return checkpointHdl.isLoaded(source);
}
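/** Records that migration of the given source to the given sink completed at the given time. */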
public void setLoaded(DataSource source,
DataSink sink,
long timestampMs,
long numLoaded) {
checkpointHdl.setLoaded(source, sink, timestampMs, numLoaded);
}
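/** Reports a migration error; throws WrappedLoadException if abortOnError is enabled. */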
public void error(DataSource source,
String entry,
String message,
RuntimeException re) {
errorHdl.error(source, entry, message, re);
}
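/** Reports a migration warning for an entry of the given data source. */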
public void warning(DataSource source, String entry, String message) {
errorHdl.warning(source, entry, message);
}
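/** Returns the total number of records migrated in this run. */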
public long getNumLoaded() {
return checkpointHdl.getNumLoaded();
}
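/** Closes both handlers and releases all directory locks. */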
public void close() {
checkpointHdl.close();
errorHdl.close();
for (DirectoryLocker locker : dirLockers.values()) {
locker.release();
}
}
/**
* Base class of CheckpointHandler and ErrorOutputHandler
*/
private class OutputHandler {
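/* Output directory, or null if file output is disabled */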
final File outputDir;
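/**
* Validates or creates the output directory and acquires an exclusive lock
* on it. A null baseDir disables file output.
*/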
public OutputHandler(String baseDir, boolean createIfNotExists) {
if (baseDir != null) {
final File dir = new File(baseDir);
final boolean exists = dir.exists();
if (!exists && createIfNotExists) {
if (!dir.mkdirs()) {
throw new IllegalArgumentException(
"Failed to create output directory: " +
toFilePath(dir));
}
} else {
checkFileExist(dir, true/* isDir */, true /* canRead */,
true /* canWrite */);
}
lockDirectory(dir);
this.outputDir = dir;
} else {
this.outputDir = null;
}
}
public void close() {
}
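/** Returns the file with the given name in the output directory, optionally creating it. */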
File getOutputFile(String fileName, boolean createIfNotExists) {
final File file = new File(outputDir, fileName);
if (createIfNotExists && !file.exists()) {
try {
file.createNewFile();
} catch (IOException ioe) {
String msg = "Failed to create file: " + file;
logger.log(Level.SEVERE, msg, ioe);
throw new IllegalArgumentException(msg, ioe);
}
}
return file;
}
}
/**
* CheckpointHandler
*/
private class CheckpointHandler extends OutputHandler {
private final static String CHECKPOINT_FILE_SUFFFIX = ".ckp";
private final static String SOURCE = "source";
private final static String SINK = "sink";
private final static String MACHINE = "machine";
private final static String LOADTIME = "time";
private final static String LOADNUM = "recordNum";
private final String hostname = getLocalHostName();
private final Set<String> loaded;
private final AtomicLong total;
public CheckpointHandler(String checkpointDir) {
super(checkpointDir, true);
loaded = new HashSet<String>();
if (outputDir != null) {
loadCheckpointFiles(outputDir);
}
total = new AtomicLong();
}
/**
* Records the state that the migration from the specified data source
* is completed.
*/
public void setLoaded(DataSource source,
DataSink sink,
long timestampMs,
long numLoaded) {
String name = getStateFileName(source, CHECKPOINT_FILE_SUFFFIX);
if (outputDir != null) {
File cpfile = getOutputFile(name, true);
writeToCheckpointFile(cpfile, source.getName(), sink.getName(),
timestampMs, numLoaded);
}
total.addAndGet(numLoaded);
loaded.add(source.getName());
}
/**
* Returns true if the migration from the specified data source is done
*/
public boolean isLoaded(DataSource source) {
return loaded.contains(source.getName());
}
/**
* Returns the total number of records migrated.
*/
public long getNumLoaded() {
return total.get();
}
@Override
public void close() {
super.close();
}
private String readSourceFromCheckpointFile(File file) {
if (!file.getName().endsWith(CHECKPOINT_FILE_SUFFFIX)) {
return null;
}
try (FileReader reader = new FileReader(file)) {
Properties props = new Properties();
props.load(reader);
String machine = props.getProperty(MACHINE);
String loadedTime = props.getProperty(LOADTIME);
String sourceName = props.getProperty(SOURCE);
if (machine != null && loadedTime != null &&
sourceName != null) {
logger.log(Level.FINE,
"Loaded checkpoint information from " + file +
": " + props.toString());
return sourceName;
}
return null;
} catch (IOException e) {
String msg = "Failed to load from checkpoint file " + file;
logger.log(Level.SEVERE, msg, e);
throw new IllegalArgumentException(msg, e);
}
}
/**
* Writes the checkpoint information to the underlying file
*/
private void writeToCheckpointFile(File file,
String source,
String sink,
long timestampMs,
long numLoaded) {
Properties props = new Properties();
props.put(SOURCE, source);
props.put(MACHINE, hostname);
props.put(LOADTIME, formatTimestamp(timestampMs));
props.put(SINK, sink);
props.put(LOADNUM, String.valueOf(numLoaded));
try (FileWriter writer = new FileWriter(file)) {
props.store(writer, null);
} catch (IOException ioe) {
String msg = "Failed to write checkpoint file: " + file;
logger.log(Level.SEVERE, msg, ioe);
throw new IllegalArgumentException(msg, ioe);
}
}
/**
* Collects the name of migrated data sources from checkpoint files.
*/
private void loadCheckpointFiles(File dir) {
final File[] files = dir.listFiles();
for (File file : files) {
if (!file.isFile()) {
logger.log(Level.FINE,
"Skip an invalid checkpoint file: " + file);
continue;
}
String sourceName = readSourceFromCheckpointFile(file);
if (sourceName == null) {
logger.log(Level.FINE,
"The file is not a valid checkpoint file: " +
file.getAbsolutePath());
continue;
}
loaded.add(sourceName);
}
}
/**
* Returns the name of local host.
*/
private String getLocalHostName() {
try {
return InetAddress.getLocalHost().getHostName();
} catch (UnknownHostException e) {
return "localhost";
}
}
}
/**
* ErrorOutputHandler
*/
private class ErrorOutputHandler extends OutputHandler {
private final static String ERR_FILE_SUFFIX = ".err";
private final boolean abortOnError;
private final int fileLimitMB;
private final int fileCount;
private final Map<String, LimitedFile> fileHandlers;
public ErrorOutputHandler(boolean abortOnError,
String outputDir,
int fileLimitMB,
int fileCount) {
super(outputDir, true);
this.abortOnError = abortOnError;
this.fileLimitMB = fileLimitMB;
this.fileCount = fileCount;
fileHandlers = new HashMap<String, LimitedFile>();
}
public void error(DataSource source,
String entry,
String message,
RuntimeException cause) {
if (errorCheckHook != null) {
errorCheckHook.doHook(cause);
}
String errMsg = (message != null) ? message : "Failed to migrate entry";
if (outputDir != null) {
String msg = errMsg + ": " + cause.getMessage();
writeLine(source, entry, msg);
}
if (abortOnError) {
WrappedLoadException wle =
new WrappedLoadException(source, errMsg, entry, cause);
logger.log(Level.SEVERE, errMsg, wle);
throw wle;
}
if (logger.isLoggable(Level.WARNING)) {
logger.log(Level.WARNING,
"[" + source.getName() + "] " + errMsg + " : " + entry,
cause);
}
}
public void warning(DataSource source, String entry, String msg) {
if (outputDir != null) {
writeLine(source, entry, msg);
} else {
if (logger.isLoggable(Level.WARNING)) {
logger.log(Level.WARNING,
"[" + source.getName() + "] " + msg + ": " + entry);
}
}
}
@Override
public void close() {
for (LimitedFile fhdl: fileHandlers.values()) {
fhdl.close();
}
super.close();
}
private int getFileLimitMB() {
return (fileLimitMB > 0) ? fileLimitMB : DEF_FILE_SIZE_LIMIT_MB;
}
private int getFileCount() {
return (fileCount > 0) ? fileCount : DEF_FILE_NUM_LIMIT;
}
private void writeLine(DataSource source, String entry, String msg) {
final LimitedFile file = getErrorFile(source);
final String now = formatTimestamp(System.currentTimeMillis());
final String text = String.format("%s %s: %s", now, entry, msg);
try {
file.writeLine(text);
} catch (FileCountLimitException fcle) {
throw new IllegalArgumentException(
"Too many errors during migration; the number of error " +
"files has reached the maximum count: " + fcle.getCount());
}
}
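/** Returns the LimitedFile used for the given source's errors, creating it on first use. */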
private LimitedFile getErrorFile(DataSource source) {
final String name = getStateFileName(source, ERR_FILE_SUFFIX);
LimitedFile fhandler = fileHandlers.get(name);
if (fhandler != null) {
return fhandler;
}
synchronized(this) {
fhandler = fileHandlers.get(name);
if (fhandler != null) {
return fhandler;
}
final File file = getOutputFile(name, false);
fhandler = new LimitedFile(file, false /* append */,
mbyteToByte(getFileLimitMB()),
getFileCount());
fileHandlers.put(name, fhandler);
return fhandler;
}
}
}
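/** Builds the checkpoint or error file name for the given data source. */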
private static String getStateFileName(DataSource source, String suffix) {
String name = formatName(source.getName());
if (suffix == null) {
return name;
}
return name + suffix;
}
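/** Sets the test hook invoked when an error is reported; intended for tests. */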
public static void setErrorCheckHook(TestHook<RuntimeException> testHook) {
errorCheckHook = testHook;
}
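/**
* Thrown when abortOnError is enabled and migrating an entry fails; carries
* the originating data source and entry.
*/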
public static class WrappedLoadException extends RuntimeException {
private static final long serialVersionUID = 1L;
private final DataSource source;
private final String entry;
WrappedLoadException(DataSource source,
String message,
String entry,
RuntimeException cause) {
super(message, cause);
this.source = source;
this.entry = entry;
}
DataSource getSource() {
return source;
}
String getEntry() {
return entry;
}
@Override
public String getMessage() {
return getMessage(true);
}
public String getMessage(boolean withSource) {
StringBuilder sb = new StringBuilder(super.getMessage());
if (entry != null) {
sb.append(" ");
sb.append(entry);
}
if (withSource && source != null) {
sb.append(" in ");
sb.append(source.getName());
}
sb.append(": ");
sb.append(getCause().getMessage());
return sb.toString();
}
}
/**
* Acquires an exclusive lock on output directory.
*/
private class DirectoryLocker {
/* The name of the lock file created in the output directory */
private static final String LOCK_FILE = "wr.lck";
/* Used for locking the output files directory */
private RandomAccessFile lockFile;
private FileChannel lockChannel;
private FileLock envLock;
DirectoryLocker(File dir) {
lockDir(dir);
}
private void lockDir(File dir) {
try {
lockFile = new RandomAccessFile(new File(dir, LOCK_FILE), "rwd");
lockChannel = lockFile.getChannel();
final String msg = "Another migrator process is already " +
"running. Failed to acquire a lock on files directory: " +
toFilePath(dir);
try {
envLock = lockChannel.tryLock(1, 1, false);
if (envLock == null) {
throw new IllegalStateException(msg);
}
} catch (OverlappingFileLockException ofle) {
logger.log(Level.SEVERE, msg, ofle);
throw new IllegalStateException(msg, ofle);
}
} catch (IOException ioe) {
String msg = "Failed to open files directory: " + dir;
logger.log(Level.SEVERE, msg, ioe);
throw new IllegalArgumentException(msg, ioe);
} catch (SecurityException se) {
String msg = "Failed to open files directory: " + dir;
logger.log(Level.SEVERE, msg, se);
throw new IllegalArgumentException(msg, se);
}
}
/**
* Releases the lock on the output files directory.
*/
void release() {
try {
if (envLock != null) {
envLock.release();
envLock = null;
}
if (lockChannel != null) {
lockChannel.close();
lockChannel = null;
}
if (lockFile != null) {
lockFile.close();
lockFile = null;
}
} catch (IOException ignored) {
}
}
}
}