net.sf.filePiper.model.ProcessorThread Maven / Gradle / Ivy
Go to download
Show more of this group Show more artifacts with this name
Show all versions of file-piper Show documentation
Show all versions of file-piper Show documentation
This project is a GUI utility for processing files. It allows selecting a set of source files and a pipeline
of processes to apply onto those files. The application shows a nice-looking user interface in which you can
define profiles for your repetitive tasks.
It provides pre-defined processors doing usual file manipulation tasks like: Copy, Head, Tail, Chunk, Search, Replace, Zip, Unzip...
But the biggest value of this file processor tool is the ability to add easily custom file processors written in java.
The newest version!
package net.sf.filePiper.model;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PipedInputStream;
import java.io.PipedOutputStream;
import org.apache.log4j.Logger;
/**
 * A pipeline stage that runs one {@link FileProcessor} on a dedicated thread.
 * <p>
 * The previous stage hands input streams to this stage through the {@link PipeComponent}
 * methods ({@link #processInputStream} / {@link #createOutputStream}); this thread's
 * {@link #run()} loop picks them up one at a time and feeds them to the processor.
 * Output produced by the processor is forwarded to the next stage via
 * {@link #getOutputStream(InputFileInfo)}.
 * <p>
 * Thread-safety: hand-off between the producer thread and this thread uses this
 * object's monitor (synchronized / wait / notifyAll) around the single-slot field
 * {@link #is}: the producer blocks while the slot is full, this thread blocks while
 * it is empty.
 */
public class ProcessorThread extends Thread implements FileProcessorEnvironment, PipeComponent {

    private static Logger log = Logger.getLogger(ProcessorThread.class);

    /** Processor executed by this thread. */
    private FileProcessor processor;
    /** Single-slot hand-off: the stream currently being processed, or null when idle. Guarded by this monitor. */
    private InputStream is;
    /** Metadata describing the current input stream; set together with {@link #is}. */
    private InputFileInfo inputInfo;
    /** Pipeline this stage belongs to (exposed through {@link #getPipeline()}). */
    private Pipeline line;
    /** Downstream stage that receives this processor's output and the 'finished' signal. */
    private PipeComponent nextComponent;
    /** Scratch buffer used by {@link #releaseInputStream()} to drain unread bytes before closing. */
    private byte[] consumeBuffer = new byte[1024];
    /** Global pipeline environment used for abort checks and error/end reporting. */
    private PipelineEnvironment mainReporting;
    /** Said that this thread should continue to run (because job is not finished) */
    private boolean thisShouldContinue;

    /**
     * Creates the stage thread (the caller is responsible for starting it).
     *
     * @param p the processor this thread will run; the thread is named after it
     * @param pipeline the owning pipeline
     * @param nextPipeComponent the downstream stage receiving this processor's output
     * @param reporting global environment used for abort checks and error reporting
     */
    public ProcessorThread(FileProcessor p, Pipeline pipeline, PipeComponent nextPipeComponent, PipelineEnvironment reporting) {
        super(p.getProcessorName());
        line = pipeline;
        processor = p;
        nextComponent = nextPipeComponent;
        mainReporting = reporting;
        thisShouldContinue = true;
    }

    /**
     * Main loop of the stage: calls {@code startBatch}, then repeatedly waits for an
     * input stream, processes it and releases it, until {@link #finished()} flips
     * {@link #thisShouldContinue} or the pipeline-wide {@link #shouldContinue()} goes
     * false. Finally calls {@code endBatch} and forwards the 'finished' signal to the
     * next component. Any exception from the processor is reported to
     * {@link #mainReporting} and stops this stage.
     */
    public void run() {
        // notify that the batch will start
        try {
            processor.startBatch(this);
        } catch (IOException e) {
            log.error("Error in processor " + processor + " startBatch(..) method", e);
            mainReporting.finished(e);
            thisShouldContinue = false;
        }
        // process all the input streams that are coming
        while (shouldContinue() && thisShouldContinue) {
            // wait for an input stream
            synchronized (this) {
                while (shouldContinue() && thisShouldContinue && (is == null)) {
                    try {
                        wait();
                    } catch (Exception e) {
                        // NOTE(review): InterruptedException is swallowed without re-interrupting;
                        // the loop just re-checks its condition. Presumably deliberate — restoring
                        // the interrupt flag would make the next wait() throw immediately again.
                        log.warn("Wait interrupted by exception", e);
                    }
                }
            }
            // process it
            if ((is != null) && shouldContinue()) {
                try {
                    processor.process(is, inputInfo, this);
                } catch (Exception e) {
                    log.error("Error in processor " + processor + " for input: " + inputInfo, e);
                    mainReporting.finished(e);
                    thisShouldContinue = false;
                } finally {
                    // Always empty the hand-off slot (even on failure) so the producer
                    // blocked in processInputStream()/finished() can make progress.
                    releaseInputStream();
                }
            }
        }
        // notify that the batch is finished
        thisShouldContinue = false;
        try {
            processor.endBatch(this);
        } catch (IOException e) {
            log.error("Error in processor " + processor + " batch end", e);
            mainReporting.finished(e);
        }
        // notify the next component that there will be no more input for it.
        try {
            nextComponent.finished();
        } catch (IOException e) {
            log.error("Error in processor " + nextComponent + " when finished signal forwarded from "+this, e);
            mainReporting.finished(e);
        }
        if (log.isDebugEnabled()) log.debug("End of Thread " + this);
    }

    /**
     * Drains and closes the current input stream, clears the hand-off slot and wakes
     * any thread blocked in {@link #processInputStream} or {@link #finished()}.
     * Draining to end-of-file before closing matters when the stream is the read side
     * of a pipe (see {@link #createOutputStream}): it presumably keeps the upstream
     * writer from staying blocked on a full pipe buffer — TODO confirm intent.
     * Close failures are only logged; the slot is cleared regardless.
     */
    private synchronized void releaseInputStream() {
        if (is != null) {
            try {
                // consumes the input stream until the end-of-file (if not already done)
                int count;
                do {
                    count = is.read(consumeBuffer);
                } while (count > 0);
                // then close the stream.
                is.close();
            } catch (Exception e) {
                log.warn("Failed to close input steam in processor " + processor + " for input: " + inputInfo, e);
            }
            is = null;
            if (log.isDebugEnabled()) log.debug(" <<< " + this + " release inputStream for input: " + inputInfo);
        }
        notifyAll();
    }

    /** Debug-friendly identity: class name, processor name and identity hash of the processor. */
    public String toString() {
        return getClass().getSimpleName() + "[" + processor.getProcessorName() + "@" + System.identityHashCode(processor) + "]";
    }

    // --------------------------- PipeComponent interface implementation ---------------------------

    /**
     * Called by the PREVIOUS stage (on its thread) to hand an input stream to this
     * stage. Blocks until the single hand-off slot is free, then stores the stream
     * and its info and wakes this stage's {@link #run()} loop. The hand-off is
     * silently dropped if the stage is shutting down or the pipeline aborted.
     *
     * @param input the stream to process next
     * @param info metadata describing that stream
     */
    public synchronized void processInputStream(InputStream input, InputFileInfo info) {
        // wait for the input stream to be null
        while (shouldContinue() && thisShouldContinue && (is != null)) {
            try {
                wait();
            } catch (Exception e) {
                // Interrupt policy: same as in run() — log and re-check the condition.
                log.warn("Wait interrupted by exception", e);
            }
        }
        if (shouldContinue() && thisShouldContinue) {
            if (log.isDebugEnabled()) log.debug(" >>> " + this + " starts for input " + info);
            is = input;
            inputInfo = info;
            notifyAll();
        }
    }

    /**
     * Gives the previous stage a stream to write into: creates a piped pair,
     * registers the read side as this stage's next input (may block in
     * {@link #processInputStream} until the slot is free) and returns the write side.
     *
     * @param info metadata describing the data that will be written
     * @return the write end of the pipe feeding this stage
     * @throws IOException if the pipe cannot be created
     */
    public OutputStream createOutputStream(InputFileInfo info) throws IOException {
        PipedInputStream pis = new PipedInputStream();
        PipedOutputStream pos = new PipedOutputStream(pis);
        processInputStream(pis, info);
        return pos;
    }

    /**
     * Called by the PREVIOUS stage (on its thread) when no more input will come.
     * Blocks until the current input stream (if any) has been fully processed, then
     * flags this stage as done and wakes its {@link #run()} loop so it can terminate.
     *
     * @throws IOException declared by the PipeComponent contract; not thrown here
     */
    public synchronized void finished() throws IOException {
        if (log.isDebugEnabled()) log.debug("<> signal in " + this);
        // wait for the current input stream to be finished
        while (shouldContinue() && thisShouldContinue && (is != null)) {
            try {
                wait();
            } catch (Exception e) {
                log.warn("Wait interrupted by exception", e);
            }
        }
        if (log.isDebugEnabled()) log.debug("||| last input stream done in " + this);
        thisShouldContinue = false;
        notifyAll();
    }

    // ------------------------------ FileProcessorEnvironment interface implementation ------------------------------

    /**
     * Obtains from the next pipe component an output stream for the processor to
     * write its result into.
     *
     * @param info metadata describing the output about to be produced
     * @return the stream feeding the next stage
     * @throws IOException if the downstream component fails to create the stream
     */
    public OutputStream getOutputStream(InputFileInfo info) throws IOException {
        OutputStream out = nextComponent.createOutputStream(info);
        return out;
    }

    /**
     * Said that the pipeline processing should continue (until all the threads are finished).
     * This will return false if 'abort' was pressed or if an exception occurred in any of the processor.
     */
    public boolean shouldContinue() {
        return mainReporting.shouldContinue();
    }

    /** @return the pipeline this stage belongs to */
    public Pipeline getPipeline() {
        return line;
    }

    /**
     * Derives the current execution phase, checked in priority order:
     * ABORTED, then ERRORED (both pipeline-wide), then DONE (this stage finished),
     * then RUNNING, else NONE.
     */
    public ExecutionPhase getCurrentPhase() {
        if (mainReporting.isAborted()) return ExecutionPhase.ABORTED;
        if (mainReporting.isErrored()) return ExecutionPhase.ERRORED;
        if (!thisShouldContinue) return ExecutionPhase.DONE;
        if (mainReporting.isRunning()) return ExecutionPhase.RUNNING;
        return ExecutionPhase.NONE;
    }
}
© 2015 - 2024 Weber Informatics LLC | Privacy Policy