
org.xlcloud.commons.compress.CompressUtils Maven / Gradle / Ivy
Go to download
Show more of this group Show more artifacts with this name
Show all versions of compress-utils Show documentation
Show all versions of compress-utils Show documentation
This module provides common compression utilities based on Apache Commons Compress
The newest version!
/*
* Copyright 2012 AMG.lab, a Bull Group Company
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.xlcloud.commons.compress;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.ArchiveException;
import org.apache.commons.compress.archivers.ArchiveInputStream;
import org.apache.commons.compress.archivers.ArchiveStreamFactory;
import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.xlcloud.compress.exception.DuplicatedArchiveEntryException;
import org.xlcloud.compress.exception.DuplicatedDirectoryException;
/**
* Commons class for compression, decompression of files
*
* @author Michał Kamiński, AMG.net
* @author Konrad Król, AMG.net
*/
public class CompressUtils {

    /** Length in bytes of the GZIP magic header. */
    private static final int GZIP_MAGIC_LENGTH = 2;

    /** Utility class — not meant to be instantiated. */
    private CompressUtils() {
    }

    /**
     * Creates a tar.gz archive containing the elements passed in the list.
     * All required parent directories of {@code targetFile} are created; the
     * existing parent directory (and anything inside it) is deleted first, so
     * an existing target file is always overwritten.
     *
     * @param archiveElements elements which should be added to the archive
     * @param archiveDirectory directory inside the archive where all elements
     *        should be placed; pass an empty string to place them at the root
     * @param targetFile target path where the tar.gz archive should be created
     *
     * @throws IOException if existing directories could not be deleted,
     *         required directories could not be created, the target could not
     *         be opened for writing, or any element could not be read
     * @throws DuplicatedArchiveEntryException if two archive elements share a name
     */
    public static void forceCreateArchive(List<File> archiveElements, String archiveDirectory, File targetFile) throws IOException,
            DuplicatedArchiveEntryException {
        File directory = targetFile.getParentFile();
        if (directory != null) {
            // Wipe and recreate the parent so a stale target file never survives.
            FileUtils.deleteDirectory(directory);
            FileUtils.forceMkdir(directory);
        }
        createArchive(archiveElements, archiveDirectory, targetFile);
    }

    /**
     * Creates a tar.gz archive containing the elements passed in the list.
     * The parent directories of {@code targetFile} must already exist, and the
     * target file itself must not.
     *
     * @param archiveElements elements which should be added to the archive
     * @param archiveDirectory directory inside the archive where all elements
     *        should be placed; pass an empty string to place them at the root
     * @param targetFile target path where the tar.gz archive should be created
     *
     * @throws IOException if the target file already exists, the required
     *         directory path does not exist, the target could not be opened
     *         for writing, or any element could not be read
     * @throws DuplicatedArchiveEntryException if two archive elements share a name
     */
    public static void createArchive(List<File> archiveElements, String archiveDirectory, File targetFile) throws IOException,
            DuplicatedArchiveEntryException {
        validateArchiveEntriesUniqueness(archiveElements);
        if (targetFile.exists()) {
            throw new IOException("File: " + targetFile.getAbsolutePath() + " already exists");
        }
        TarArchiveOutputStream archive = null;
        try {
            archive = new TarArchiveOutputStream(new GZIPOutputStream(new BufferedOutputStream(new FileOutputStream(targetFile))));
            for (File archiveElement : archiveElements) {
                addFileToArchive(archive, archiveElement, archiveDirectory);
            }
        } finally {
            // Closing the tar stream also finishes the archive and closes the
            // wrapped gzip/buffered/file streams.
            IOUtils.closeQuietly(archive);
        }
    }

    /**
     * Writes a tar.gz archive of {@code rootDir} (recursively) to the stream
     * passed as a parameter.
     *
     * <p>Note: the returned {@link TarArchiveOutputStream} is already closed
     * when this method returns; it is returned for reference only.
     *
     * @param rootDir directory (or file) to archive
     * @param stream destination stream the compressed archive is written to
     * @return the (closed) tar output stream that wrapped {@code stream}
     * @throws IOException if any file could not be read or written
     */
    public static OutputStream writeArchiveToStream(File rootDir, OutputStream stream) throws IOException {
        BufferedOutputStream bufferedOutputStream = null;
        TarArchiveOutputStream archiveStream = null;
        try {
            bufferedOutputStream = new BufferedOutputStream(stream);
            archiveStream = new TarArchiveOutputStream(new GZIPOutputStream(bufferedOutputStream));
            addFileToArchive(archiveStream, rootDir.getAbsoluteFile(), "");
            return archiveStream;
        } finally {
            IOUtils.closeQuietly(archiveStream);
            IOUtils.closeQuietly(bufferedOutputStream);
        }
    }

    /**
     * Recursively adds {@code file} (and, for directories, all children) to
     * the tar archive under the entry name {@code base + file.getName()}.
     *
     * @param archiveStream open tar stream to append entries to
     * @param file file or directory to add
     * @param base entry-name prefix, either empty or ending with {@code "/"}
     * @throws IOException if the file could not be read or the entry written
     */
    private static void addFileToArchive(TarArchiveOutputStream archiveStream, File file, String base) throws IOException {
        String entryName = base + file.getName();
        TarArchiveEntry tarEntry = new TarArchiveEntry(file, entryName);
        archiveStream.putArchiveEntry(tarEntry);
        if (file.isFile()) {
            FileInputStream fInputStream = null;
            try {
                fInputStream = new FileInputStream(file);
                IOUtils.copy(fInputStream, archiveStream);
                archiveStream.closeArchiveEntry();
            } finally {
                IOUtils.closeQuietly(fInputStream);
            }
        } else {
            // Directory: close its (empty) entry, then recurse into children.
            archiveStream.closeArchiveEntry();
            File[] children = file.listFiles();
            if (children != null) {
                for (File child : children) {
                    addFileToArchive(archiveStream, child, entryName + "/");
                }
            }
        }
    }

    /**
     * Extracts an archive stream under the directory {@code path}. The archive
     * format (and optional gzip compression) is auto-detected. When a
     * directory with the same name as the archive's first entry already exists
     * a {@link DuplicatedDirectoryException} is thrown.
     *
     * <p>NOTE(review): entry names are used as-is to build output paths, so a
     * crafted archive containing {@code ../} entries could escape {@code path}
     * (zip-slip). Callers should only extract trusted archives — confirm and
     * harden if untrusted input is possible. The caller retains ownership of
     * {@code stream} and is responsible for closing it.
     *
     * @param stream archive stream to extract
     * @param path destination directory (created if missing)
     * @return the root directory/file created for the archive's first entry
     * @throws IOException if an entry could not be read or written
     * @throws ArchiveException if the archive format could not be detected
     * @throws DuplicatedDirectoryException if the archive's root entry already exists
     */
    public static File extractArchiveStream(InputStream stream, String path) throws IOException, ArchiveException,
            DuplicatedDirectoryException {
        File rootDir = new File(path);
        rootDir.mkdirs();
        InputStream bufferedStream = normalizeStream(stream);
        ArchiveInputStream input = new ArchiveStreamFactory().createArchiveInputStream(bufferedStream);
        ArchiveEntry archiveEntry = null;
        String baseFolder = null;
        while ((archiveEntry = input.getNextEntry()) != null) {
            if (baseFolder == null) {
                baseFolder = archiveEntry.getName();
                if (new File(path + baseFolder).exists()) {
                    throw new DuplicatedDirectoryException("Directory with name: " + baseFolder + " already exists");
                }
            }
            if (!archiveEntry.isDirectory() && archiveEntry.getSize() > 0) {
                File extractFile = new File(path + "/" + archiveEntry.getName());
                byte[] entryContent = readEntryFully(input, archiveEntry);
                extractFile.getParentFile().mkdirs();
                extractFile.createNewFile();
                FileOutputStream output = null;
                try {
                    output = new FileOutputStream(extractFile);
                    IOUtils.write(entryContent, output);
                } finally {
                    // was leaked: the FileOutputStream was never closed
                    IOUtils.closeQuietly(output);
                }
            }
        }
        return new File(path + baseFolder);
    }

    /**
     * Reads the full declared size of {@code archiveEntry} from {@code input}.
     * A single {@code read} call may return fewer bytes than requested, so
     * loop until the buffer is filled (the original single-read produced
     * silently truncated files for large entries).
     *
     * @throws IOException if the stream ends before the declared entry size
     */
    private static byte[] readEntryFully(ArchiveInputStream input, ArchiveEntry archiveEntry) throws IOException {
        byte[] entryContent = new byte[(int) archiveEntry.getSize()];
        int offset = 0;
        while (offset < entryContent.length) {
            int read = input.read(entryContent, offset, entryContent.length - offset);
            if (read < 0) {
                throw new IOException("Unexpected end of archive entry: " + archiveEntry.getName());
            }
            offset += read;
        }
        return entryContent;
    }

    /**
     * Lists the entry names of an archive stream. The archive format (and
     * optional gzip compression) is auto-detected. The caller retains
     * ownership of {@code stream} and is responsible for closing it.
     *
     * @param stream archive stream to inspect
     * @return names of all entries, in archive order
     * @throws IOException if the stream could not be read
     * @throws ArchiveException if the archive format could not be detected
     */
    public static List<String> listArchiveFiles(InputStream stream) throws IOException, ArchiveException {
        InputStream bufferedStream = normalizeStream(stream);
        ArchiveInputStream input = new ArchiveStreamFactory().createArchiveInputStream(bufferedStream);
        List<String> archiveFiles = new ArrayList<String>();
        ArchiveEntry archiveEntry = null;
        while ((archiveEntry = input.getNextEntry()) != null) {
            archiveFiles.add(archiveEntry.getName());
        }
        return archiveFiles;
    }

    /**
     * Checks whether the stream is gzipped and returns a mark-supporting
     * {@link BufferedInputStream} (unwrapping gzip when present) — a valid
     * stream for Commons Compress format auto-detection.
     *
     * @param stream raw input stream
     * @return buffered (and, if gzipped, decompressing) stream
     * @throws IOException if the gzip header could not be read
     */
    private static InputStream normalizeStream(InputStream stream) throws IOException {
        stream = new BufferedInputStream(stream);
        if (isGZipped(stream)) {
            stream = new BufferedInputStream(new GZIPInputStream(stream));
        }
        return stream;
    }

    /**
     * Checks if an input stream is gzipped by peeking at its two magic bytes.
     * The stream position is restored when it supports mark/reset; otherwise
     * the two peeked bytes are consumed from the caller's stream (the wrapper
     * created here is discarded).
     *
     * @param in stream to probe
     * @return {@code true} if the stream starts with the GZIP magic number
     */
    public static boolean isGZipped(InputStream in) {
        if (!in.markSupported()) {
            in = new BufferedInputStream(in);
        }
        // was mark(0): a read limit of 0 allows the mark to be invalidated
        // before the magic bytes are read back, breaking reset()
        in.mark(GZIP_MAGIC_LENGTH);
        try {
            int first = in.read();
            int second = in.read();
            in.reset();
            if (first == -1 || second == -1) {
                // was: (-1 & 0xff) turned EOF into 0xff instead of "not gzip"
                return false;
            }
            return ((second << 8) | first) == GZIPInputStream.GZIP_MAGIC;
        } catch (IOException e) {
            e.printStackTrace(System.err);
            return false;
        }
    }

    /**
     * Ensures no two archive elements share a simple file name, since entry
     * names inside the archive are derived from {@link File#getName()}.
     *
     * @throws DuplicatedArchiveEntryException on the first duplicated name
     */
    private static void validateArchiveEntriesUniqueness(List<File> archiveEntries) throws DuplicatedArchiveEntryException {
        Set<String> fileNames = new HashSet<String>();
        for (File archiveEntry : archiveEntries) {
            // Set.add returns false when the name was already present.
            if (!fileNames.add(archiveEntry.getName())) {
                throw new DuplicatedArchiveEntryException("Archive entry: " + archiveEntry.getName() + " is duplicated.");
            }
        }
    }
}
© 2015 - 2025 Weber Informatics LLC | Privacy Policy