net.nicoulaj.maven.plugins.checksum.execution.target.CsvSummaryFileTarget
Computes checksum digests for project artifacts, dependencies, and files, and writes them to individual or summary files.
/*
* Copyright 2010 Julien Nicoulaud
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.nicoulaj.maven.plugins.checksum.execution.target;
import org.codehaus.plexus.util.FileUtils;
import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.SortedSet;
import java.util.TreeSet;
/**
* An {@link ExecutionTarget} that writes digests to a CSV file.
*
* @author Julien Nicoulaud
* @since 1.0
*/
public class CsvSummaryFileTarget implements ExecutionTarget
{
/**
* The platform line separator.
*/
public static final String LINE_SEPARATOR = System.getProperty( "line.separator" );
/**
* The CSV column separator character.
*/
public static final String CSV_COLUMN_SEPARATOR = ",";
/**
* The CSV comment marker character.
*/
public static final String CSV_COMMENT_MARKER = "#";
/**
* Encoding to use for generated files.
*/
protected String encoding;
/**
* Maps each file to its digests, as an algorithm => hashcode association.
*/
protected Map<File, Map<String, String>> filesHashcodes;
/**
* The set of algorithms encountered.
*/
protected SortedSet<String> algorithms;
/**
* The target file where the summary is written.
*/
protected File summaryFile;
/**
* Build a new instance of {@link CsvSummaryFileTarget}.
*
* @param summaryFile the file to which the summary should be written.
* @param encoding the encoding to use for generated files.
*/
public CsvSummaryFileTarget( File summaryFile, String encoding )
{
this.summaryFile = summaryFile;
this.encoding = encoding;
}
/**
* {@inheritDoc}
*/
public void init()
{
filesHashcodes = new HashMap<File, Map<String, String>>();
algorithms = new TreeSet<String>();
}
/**
* {@inheritDoc}
*/
public void write( String digest, File file, String algorithm )
{
// Initialize an entry for the file if needed.
if ( !filesHashcodes.containsKey( file ) )
{
filesHashcodes.put( file, new HashMap<String, String>() );
}
// Store the algorithm => hashcode mapping for this file.
Map<String, String> fileHashcodes = filesHashcodes.get( file );
fileHashcodes.put( algorithm, digest );
// Store the algorithm.
algorithms.add( algorithm );
}
/**
* {@inheritDoc}
*/
public void close() throws ExecutionTargetCloseException
{
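// Illustrative sketch of the generated layout (algorithm names depend on the
// configured digesters; file name and digest values below are placeholders):
//   #File,MD5,SHA-1
//   my-artifact.jar,<md5 digest>,<sha-1 digest>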
StringBuilder sb = new StringBuilder();
// Write the CSV file header.
sb.append( CSV_COMMENT_MARKER ).append( "File" );
for ( String algorithm : algorithms )
{
sb.append( CSV_COLUMN_SEPARATOR ).append( algorithm );
}
// Write a line for each file.
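// Note: rows follow the HashMap's unspecified iteration order, so they are not sorted.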
for ( File file : filesHashcodes.keySet() )
{
sb.append( LINE_SEPARATOR ).append( file.getName() );
Map<String, String> fileHashcodes = filesHashcodes.get( file );
for ( String algorithm : algorithms )
{
sb.append( CSV_COLUMN_SEPARATOR );
if ( fileHashcodes.containsKey( algorithm ) )
{
sb.append( fileHashcodes.get( algorithm ) );
}
}
}
// Write the result to the summary file.
try
{
FileUtils.fileWrite( summaryFile.getPath(), encoding, sb.toString() );
}
catch ( IOException e )
{
throw new ExecutionTargetCloseException( e.getMessage() );
}
}
}
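For context, here is a minimal usage sketch of the class above. It is illustrative only: the output path, file name, algorithm names, and digest values are placeholders chosen for the example, and in the plugin the digests would come from a real digester rather than hard-coded strings.

import java.io.File;

/**
 * Minimal usage sketch (illustrative only): the target is initialized once,
 * fed one digest per (file, algorithm) pair, then closed to write the summary.
 */
public class CsvSummaryFileTargetExample
{
    public static void main( String[] args ) throws ExecutionTargetCloseException
    {
        File summary = new File( "target/checksums.csv" ); // hypothetical output location
        CsvSummaryFileTarget target = new CsvSummaryFileTarget( summary, "UTF-8" );
        target.init();
        // Placeholder digests (MD5 and SHA-1 of the empty string), one call per algorithm.
        target.write( "d41d8cd98f00b204e9800998ecf8427e", new File( "my-artifact.jar" ), "MD5" );
        target.write( "da39a3ee5e6b4b0d3255bfef95601890afd80709", new File( "my-artifact.jar" ), "SHA-1" );
        // close() assembles the header plus one CSV row per file and writes the summary file.
        target.close();
    }
}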