
de.huxhorn.sulky.codec.filebuffer.CodecFileBuffer Maven / Gradle / Ivy
/*
* sulky-modules - several general-purpose modules.
* Copyright (C) 2007-2018 Joern Huxhorn
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/*
* Copyright 2007-2018 Joern Huxhorn
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.huxhorn.sulky.codec.filebuffer;
import de.huxhorn.sulky.buffers.BasicBufferIterator;
import de.huxhorn.sulky.buffers.Dispose;
import de.huxhorn.sulky.buffers.DisposeOperation;
import de.huxhorn.sulky.buffers.ElementProcessor;
import de.huxhorn.sulky.buffers.FileBuffer;
import de.huxhorn.sulky.buffers.Reset;
import de.huxhorn.sulky.buffers.SetOperation;
import de.huxhorn.sulky.codec.Codec;
import java.io.File;
import java.io.IOException;
import java.io.InvalidClassException;
import java.io.RandomAccessFile;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* In contrast to SerializingFileBuffer, this implementation supports the following:
*
*
* - An optional magic value to identify the type of a buffer file.
* If present (and it should be), it is contained in the first four bytes of the data-file
* and can be evaluated by external classes, e.g. FileFilters.
* An application would use one (or more) specific magic value to identify it's own files.
*
* - Configurable Codec so the way the elements are actually written and read can be changed as needed.
*
* -
* Optional meta data that can be used to provide additional information about the content of the buffer.
* It might be used to identify the correct Codec required by the buffer
*
* - Optional ElementProcessors that are executed after elements are added to the buffer.
*
*
* TODO: more documentation :p
*
* @param the type of objects that are stored in this buffer.
*/
public class CodecFileBuffer
implements FileBuffer, SetOperation, DisposeOperation
{
private final Logger logger = LoggerFactory.getLogger(CodecFileBuffer.class);
private final ReadWriteLock readWriteLock;
/**
* the file that contains the serialized objects.
*/
private File dataFile;
/**
* index file that contains the number of contained objects as well as the offsets of the objects in the
* serialized file.
*/
private File indexFile;
private static final String INDEX_EXTENSION = ".index";
private Map preferredMetaData;
private Codec codec;
private List> elementProcessors;
private FileHeaderStrategy fileHeaderStrategy;
private int magicValue;
private FileHeader fileHeader;
private boolean preferredSparse;
private DataStrategy dataStrategy;
private IndexStrategy indexStrategy;
/**
* TODO: add description :p
*
* @param magicValue the magic value of the buffer.
* @param sparse whether or not this buffer is sparse, i.e. not continuous.
* @param preferredMetaData the meta data of the buffer. Might be null.
* @param codec the codec used by this buffer. Might be null.
* @param dataFile the data file.
* @param indexFile the index file of the buffer.
*/
public CodecFileBuffer(int magicValue, boolean sparse, Map preferredMetaData, Codec codec, File dataFile, File indexFile)
{
this(magicValue, sparse, preferredMetaData, codec, dataFile, indexFile, new DefaultFileHeaderStrategy());
}
public CodecFileBuffer(int magicValue, boolean preferredSparse, Map preferredMetaData, Codec codec, File dataFile, File indexFile, FileHeaderStrategy fileHeaderStrategy)
{
this.indexStrategy = new DefaultIndexStrategy();
this.magicValue = magicValue;
this.fileHeaderStrategy = fileHeaderStrategy;
this.readWriteLock = new ReentrantReadWriteLock(true);
this.preferredSparse = preferredSparse;
if(preferredMetaData != null)
{
preferredMetaData = new HashMap<>(preferredMetaData);
}
if(preferredMetaData != null)
{
this.preferredMetaData = new HashMap<>(preferredMetaData);
}
this.codec = codec;
setDataFile(dataFile);
if(indexFile == null)
{
File parent = dataFile.getParentFile();
String indexName = dataFile.getName();
int dotIndex = indexName.lastIndexOf('.');
if(dotIndex > 0)
{
// remove extension,
indexName = indexName.substring(0, dotIndex);
}
indexName += INDEX_EXTENSION;
indexFile = new File(parent, indexName);
}
setIndexFile(indexFile);
if(!initFilesIfNecessary())
{
validateHeader();
}
}
private void validateHeader()
{
Lock lock = readWriteLock.readLock();
lock.lock();
try
{
this.fileHeader = null;
FileHeader header = fileHeaderStrategy.readFileHeader(dataFile);
if(header == null)
{
throw new IllegalArgumentException("Could not read file header from file '" + dataFile.getAbsolutePath() + "'. File isn't compatible.");
}
if(header.getMagicValue() != magicValue)
{
throw new IllegalArgumentException("Wrong magic value. Expected 0x" + Integer.toHexString(magicValue) + " but was " + Integer.toHexString(header.getMagicValue()) + "!");
}
if(dataFile.length() > header.getDataOffset() && !indexFile.exists())
{
throw new IllegalArgumentException("dataFile contains data but indexFile " + indexFile.getAbsolutePath() + " is not valid!");
}
setFileHeader(header);
}
catch(IOException ex)
{
throw new IllegalArgumentException("Could not read magic value from file '" + dataFile.getAbsolutePath() + "'!", ex);
}
finally
{
lock.unlock();
}
}
public Codec getCodec()
{
return codec;
}
public void setCodec(Codec codec)
{
this.codec = codec;
}
public List> getElementProcessors()
{
if(elementProcessors == null)
{
return null;
}
return Collections.unmodifiableList(elementProcessors);
}
public void setElementProcessors(List> elementProcessors)
{
if(elementProcessors != null)
{
if(elementProcessors.isEmpty())
{
// performance enhancement
elementProcessors = null;
}
else
{
elementProcessors = new ArrayList<>(elementProcessors);
}
}
this.elementProcessors = elementProcessors;
}
private boolean initFilesIfNecessary()
{
if(!dataFile.exists() || dataFile.length() < fileHeaderStrategy.getMinimalSize())
{
Throwable t=null;
boolean dataDeleted=false;
boolean indexDeleted=false;
Lock lock = readWriteLock.writeLock();
lock.lock();
try
{
dataDeleted=dataFile.delete();
setFileHeader(fileHeaderStrategy.writeFileHeader(dataFile, magicValue, preferredMetaData, preferredSparse));
indexDeleted=indexFile.delete();
}
catch(IOException e)
{
t=e;
}
finally
{
lock.unlock();
}
if(!indexDeleted)
{
if(logger.isDebugEnabled()) logger.debug("Couldn't delete index file {}.", indexFile.getAbsolutePath()); // NOPMD
}
if(!dataDeleted)
{
if(logger.isDebugEnabled()) logger.debug("Couldn't delete data file {}.", dataFile.getAbsolutePath()); // NOPMD
}
if(t!=null)
{
if(logger.isWarnEnabled()) logger.warn("Exception while initializing files!", t);
return false;
}
return true;
}
return false;
}
public FileHeader getFileHeader()
{
return fileHeader;
}
/**
* @return the preferred meta data of the buffer, as defined by c'tor.
*/
public Map getPreferredMetaData()
{
if(preferredMetaData == null)
{
return null;
}
return Collections.unmodifiableMap(preferredMetaData);
}
@Override
public File getDataFile()
{
return dataFile;
}
public File getIndexFile()
{
return indexFile;
}
@Override
public long getSize()
{
if(!indexFile.canRead())
{
return 0;
}
Throwable throwable;
Lock lock = readWriteLock.readLock();
lock.lock(); // FindBugs "Multithreaded correctness - Method does not release lock on all exception paths" is a false positive
try(RandomAccessFile raf = new RandomAccessFile(indexFile, "r"))
{
return indexStrategy.getSize(raf);
}
catch(Throwable e)
{
throwable = e;
}
finally
{
lock.unlock();
}
// it's a really bad idea to log while locked *sigh*
if(logger.isDebugEnabled()) logger.debug("Couldn't retrieve size!", throwable);
return 0;
}
/**
* If no element is found, null is returned.
*
* @param index must be in the range [0..(getSize()-1)]
.
* @return the element at the given index.
* @throws IllegalStateException if no Decoder has been set.
*/
@Override
public E get(long index)
{
if(!dataFile.canRead() || !indexFile.canRead())
{
return null;
}
Lock lock = readWriteLock.readLock();
lock.lock();
Throwable throwable;
try(RandomAccessFile randomSerializeIndexFile = new RandomAccessFile(indexFile, "r");
RandomAccessFile randomSerializeFile = new RandomAccessFile(dataFile, "r"))
{
return dataStrategy.get(index, randomSerializeIndexFile, randomSerializeFile, codec, indexStrategy);
}
catch(Throwable e)
{
throwable = e;
}
finally
{
lock.unlock();
}
// it's a really bad idea to log while locked *sigh*
if(throwable instanceof ClassNotFoundException
|| throwable instanceof InvalidClassException)
{
if(logger.isWarnEnabled()) logger.warn("Couldn't deserialize object at index {}!\n{}", index, throwable);
}
else if(throwable instanceof ClassCastException)
{
if(logger.isWarnEnabled()) logger.warn("Couldn't cast deserialized object at index {}!\n{}", index, throwable);
}
else
{
if(logger.isWarnEnabled()) logger.warn("Couldn't retrieve element at index {}!", index, throwable);
}
return null;
}
/**
* Adds the element to the end of the buffer.
*
* @param element to add.
* @throws IllegalStateException if no Encoder has been set.
*/
@Override
public void add(E element)
{
initFilesIfNecessary();
Lock lock = readWriteLock.writeLock();
lock.lock();
Throwable throwable = null;
try(RandomAccessFile randomIndexFile = new RandomAccessFile(indexFile, "rw");
RandomAccessFile randomDataFile = new RandomAccessFile(dataFile, "rw"))
{
dataStrategy.add(element, randomIndexFile, randomDataFile, codec, indexStrategy);
// call processors if available
List> localProcessors = elementProcessors;
if(localProcessors != null)
{
for(ElementProcessor current : elementProcessors)
{
current.processElement(element);
}
}
}
catch(IOException e)
{
throwable = e;
}
finally
{
lock.unlock();
}
if(throwable != null)
{
// it's a really bad idea to log while locked *sigh*
if(logger.isWarnEnabled()) logger.warn("Couldn't write element!", throwable); // NOPMD
}
}
/**
* Adds all elements to the end of the buffer.
*
* @param elements to add.
* @throws IllegalStateException if no Encoder has been set.
*/
@Override
public void addAll(List elements)
{
if(elements != null)
{
initFilesIfNecessary();
int newElementCount = elements.size();
if(newElementCount > 0)
{
Lock lock = readWriteLock.writeLock();
lock.lock();
Throwable throwable;
try(RandomAccessFile randomIndexFile = new RandomAccessFile(indexFile, "rw");
RandomAccessFile randomDataFile = new RandomAccessFile(dataFile, "rw")
)
{
dataStrategy.addAll(elements, randomIndexFile, randomDataFile, codec, indexStrategy);
// call processors if available
if(elementProcessors != null)
{
for(ElementProcessor current : elementProcessors)
{
current.processElements(elements);
}
}
return;
}
catch(Throwable e)
{
throwable = e;
}
finally
{
lock.unlock();
}
// it's a really bad idea to log while locked *sigh*
if(logger.isWarnEnabled()) logger.warn("Couldn't write element!", throwable); // NOPMD
}
}
}
@Override
public void addAll(E[] elements)
{
addAll(Arrays.asList(elements));
}
@Override
public void reset()
{
Throwable t=null;
boolean indexDeleted=false;
boolean dataDeleted=false;
Lock lock = readWriteLock.writeLock();
lock.lock();
try
{
indexDeleted=indexFile.delete();
dataDeleted=dataFile.delete();
fileHeaderStrategy.writeFileHeader(dataFile, magicValue, preferredMetaData, preferredSparse);
if(elementProcessors != null)
{
for(ElementProcessor current : elementProcessors)
{
Reset.reset(current);
}
}
}
catch(IOException e)
{
t=e;
}
finally
{
lock.unlock();
}
if(!indexDeleted)
{
if(logger.isDebugEnabled()) logger.debug("Couldn't delete index file {}.", indexFile.getAbsolutePath()); // NOPMD
}
if(!dataDeleted)
{
if(logger.isDebugEnabled()) logger.debug("Couldn't delete data file {}.", dataFile.getAbsolutePath()); // NOPMD
}
if(t != null)
{
if(logger.isWarnEnabled()) logger.warn("Exception while resetting file!", t); // NOPMD
}
}
/**
* @return will always return false, i.e. it does not check for disk space!
*/
@Override
public boolean isFull()
{
return false;
}
@Override
public Iterator iterator()
{
return new BasicBufferIterator<>(this);
}
private void setDataFile(File dataFile)
{
prepareFile(dataFile);
this.dataFile = dataFile;
}
private void setIndexFile(File indexFile)
{
prepareFile(indexFile);
this.indexFile = indexFile;
}
private void prepareFile(File file)
{
File parent = file.getParentFile();
if(parent != null)
{
if(parent.mkdirs())
{
if(logger.isDebugEnabled()) logger.debug("Created directory {}.", parent.getAbsolutePath()); // NOPMD
}
if(!parent.isDirectory())
{
throw new IllegalArgumentException(parent.getAbsolutePath() + " is not a directory!");
}
if(file.isFile() && !file.canWrite())
{
throw new IllegalArgumentException(file.getAbsolutePath() + " is not writable!");
}
}
}
@Override
public String toString()
{
StringBuilder result = new StringBuilder(300);
result.append("CodecFileBuffer[fileHeader=").append(fileHeader)
.append(", preferredMetaData=").append(preferredMetaData)
.append(", dataFile=");
if(dataFile == null)
{
result.append("null");
}
else
{
result.append('"').append(dataFile.getAbsolutePath()).append('"');
}
result.append(", indexFile=");
if(indexFile == null)
{
result.append("null");
}
else
{
result.append('"').append(indexFile.getAbsolutePath()).append('"');
}
result.append(", codec=").append(codec)
.append(']');
return result.toString();
}
@Override
public void dispose()
{
if(elementProcessors != null)
{
for(ElementProcessor current : elementProcessors)
{
Dispose.dispose(current);
}
}
// TODO: implement dispose()
}
@Override
public boolean isDisposed()
{
return false; // TODO: implement isDisposed()
}
private void setFileHeader(FileHeader fileHeader)
{
MetaData metaData = fileHeader.getMetaData();
if(metaData.isSparse())
{
dataStrategy = new SparseDataStrategy<>();
}
else
{
dataStrategy = new DefaultDataStrategy<>();
}
this.fileHeader = fileHeader;
}
@Override
public boolean set(long index, E element)
{
initFilesIfNecessary();
Lock lock = readWriteLock.writeLock();
lock.lock();
Throwable throwable = null;
boolean result = false;
try(RandomAccessFile randomIndexFile = new RandomAccessFile(indexFile, "rw");
RandomAccessFile randomDataFile = new RandomAccessFile(dataFile, "rw"))
{
result = dataStrategy.set(index, element, randomIndexFile, randomDataFile, codec, indexStrategy);
// call processors if available
List> localProcessors = elementProcessors;
if(localProcessors != null)
{
for(ElementProcessor current : elementProcessors)
{
current.processElement(element);
}
}
}
catch(IOException e)
{
throwable = e;
}
finally
{
lock.unlock();
}
if(throwable != null)
{
// it's a really bad idea to log while locked *sigh*
if(logger.isWarnEnabled()) logger.warn("Couldn't write element!", throwable); // NOPMD
}
return result;
}
@Override
public boolean isSetSupported()
{
return dataStrategy != null && dataStrategy.isSetSupported();
}
}
// © 2015 - 2025 Weber Informatics LLC | Privacy Policy (repository page footer, not part of the source)