package org.apache.maven.archiva.consumers.database;

/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

import org.apache.maven.archiva.configuration.ArchivaConfiguration;
import org.apache.maven.archiva.configuration.ConfigurationNames;
import org.apache.maven.archiva.configuration.FileTypes;
import org.apache.maven.archiva.configuration.ManagedRepositoryConfiguration;
import org.apache.maven.archiva.consumers.AbstractMonitoredConsumer;
import org.apache.maven.archiva.consumers.ConsumerException;
import org.apache.maven.archiva.consumers.KnownRepositoryContentConsumer;
import org.apache.maven.archiva.database.ArchivaDAO;
import org.apache.maven.archiva.database.ArchivaDatabaseException;
import org.apache.maven.archiva.model.ArchivaArtifact;
import org.apache.maven.archiva.model.ArtifactReference;
import org.apache.maven.archiva.repository.ManagedRepositoryContent;
import org.apache.maven.archiva.repository.RepositoryContentFactory;
import org.apache.maven.archiva.repository.RepositoryException;
import org.apache.maven.archiva.repository.layout.LayoutException;
import org.codehaus.plexus.digest.Digester;
import org.codehaus.plexus.digest.DigesterException;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.Initializable;
import org.codehaus.plexus.personality.plexus.lifecycle.phase.InitializationException;
import org.codehaus.plexus.registry.Registry;
import org.codehaus.plexus.registry.RegistryListener;

import java.io.File;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;

/**
 * ArtifactUpdateDatabaseConsumer - takes an artifact from disk and records it in the database.
 *
 * @author Joakim Erdfelt
 * @version $Id: ArtifactUpdateDatabaseConsumer.java 585576 2007-10-17 17:01:25Z joakime $
 * @plexus.component role="org.apache.maven.archiva.consumers.KnownRepositoryContentConsumer"
 *                   role-hint="update-db-artifact"
 *                   instantiation-strategy="per-lookup"
 */
public class ArtifactUpdateDatabaseConsumer
    extends AbstractMonitoredConsumer
    implements KnownRepositoryContentConsumer, RegistryListener, Initializable
{
    private static final String TYPE_NOT_ARTIFACT = "file-not-artifact";

    private static final String DB_ERROR = "db-error";

    private static final String CHECKSUM_CALCULATION = "checksum-calc";

    /**
     * @plexus.configuration default-value="update-db-artifact"
     */
    private String id;

    /**
     * @plexus.configuration default-value="Update the Artifact in the Database"
     */
    private String description;

    /**
     * @plexus.requirement role-hint="jdo"
     */
    private ArchivaDAO dao;

    /**
     * @plexus.requirement
     */
    private ArchivaConfiguration configuration;

    /**
     * @plexus.requirement
     */
    private FileTypes filetypes;

    /**
     * @plexus.requirement
     */
    private RepositoryContentFactory repositoryFactory;

    /**
     * @plexus.requirement role-hint="sha1"
     */
    private Digester digestSha1;

    /**
     * @plexus.requirement role-hint="md5"
     */
    private Digester digestMd5;

    private ManagedRepositoryContent repository;

    private File repositoryDir;

    private List includes = new ArrayList();

    public String getId()
    {
        return this.id;
    }

    public String getDescription()
    {
        return this.description;
    }

    public boolean isPermanent()
    {
        return true;
    }

    public List getExcludes()
    {
        return null;
    }

    public List getIncludes()
    {
        return this.includes;
    }
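
    /**
     * Begin a scan of the given managed repository: resolve its layout-aware content
     * handler and record the repository root directory on disk.
     *
     * @param repo the configuration of the repository about to be scanned.
     * @throws ConsumerException if the repository content cannot be resolved.
     */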
    public void beginScan( ManagedRepositoryConfiguration repo )
        throws ConsumerException
    {
        try
        {
            this.repository = repositoryFactory.getManagedRepositoryContent( repo.getId() );
            this.repositoryDir = new File( repository.getRepoRoot() );
        }
        catch ( RepositoryException e )
        {
            throw new ConsumerException( "Unable to start ArtifactUpdateDatabaseConsumer: " + e.getMessage(), e );
        }
    }
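
    /**
     * Process a single repository file: resolve it to a live artifact, calculate its MD5
     * and SHA1 checksums, record its size, last-modified timestamp and origin, and save
     * the result to the database. Checksum failures are reported as consumer warnings;
     * database failures are reported as consumer errors.
     *
     * @param path the repository-relative path of the file to process.
     */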
    public void processFile( String path )
        throws ConsumerException
    {
        ArchivaArtifact artifact = getLiveArtifact( path );

        if ( artifact == null )
        {
            return;
        }

        try
        {
            artifact.getModel().setRepositoryId( this.repository.getId() );

            // Calculate the hashcodes.
            File artifactFile = new File( this.repositoryDir, path );
            try
            {
                artifact.getModel().setChecksumMD5( digestMd5.calc( artifactFile ) );
            }
            catch ( DigesterException e )
            {
                triggerConsumerWarning( CHECKSUM_CALCULATION,
                                        "Unable to calculate the MD5 checksum: " + e.getMessage() );
            }

            try
            {
                artifact.getModel().setChecksumSHA1( digestSha1.calc( artifactFile ) );
            }
            catch ( DigesterException e )
            {
                triggerConsumerWarning( CHECKSUM_CALCULATION,
                                        "Unable to calculate the SHA1 checksum: " + e.getMessage() );
            }

            artifact.getModel().setLastModified( new Date( artifactFile.lastModified() ) );
            artifact.getModel().setSize( artifactFile.length() );
            artifact.getModel().setOrigin( "FileSystem" );

            dao.getArtifactDAO().saveArtifact( artifact );
        }
        catch ( ArchivaDatabaseException e )
        {
            triggerConsumerError( DB_ERROR, "Unable to save artifact to database: " + e.getMessage() );
        }
    }

    /**
     * Get a Live Artifact from a Path.
     *
     * Resolves the artifact details from the path, then returns a database "live" version
     * of that artifact, suitable for modification and saving (without the need to check
     * for existence in the database prior to saving).
     *
     * @param path the path to work from.
     * @return the artifact that is suitable for database saving, or null if the path
     *         cannot be resolved to an artifact.
     */
    public ArchivaArtifact getLiveArtifact( String path )
    {
        try
        {
            ArtifactReference artifact = repository.toArtifactReference( path );

            ArchivaArtifact liveArtifact = dao.getArtifactDAO().createArtifact( artifact.getGroupId(),
                                                                                artifact.getArtifactId(),
                                                                                artifact.getVersion(),
                                                                                artifact.getClassifier(),
                                                                                artifact.getType() );

            return liveArtifact;
        }
        catch ( LayoutException e )
        {
            triggerConsumerError( TYPE_NOT_ARTIFACT,
                                  "Path " + path + " cannot be converted to artifact: " + e.getMessage() );
            return null;
        }
    }
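
    /*
     * Illustrative example (assuming the default Maven 2 repository layout): the path
     * "org/apache/archiva/archiva-model/1.0/archiva-model-1.0.jar" resolves to
     * groupId "org.apache.archiva", artifactId "archiva-model", version "1.0",
     * no classifier, type "jar".
     */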

    public void completeScan()
    {
        /* do nothing */
    }
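
    /**
     * When the repository scanning configuration changes, reload the include patterns
     * from the configured artifact file types.
     */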
    public void afterConfigurationChange( Registry registry, String propertyName, Object propertyValue )
    {
        if ( ConfigurationNames.isRepositoryScanning( propertyName ) )
        {
            initIncludes();
        }
    }

    public void beforeConfigurationChange( Registry registry, String propertyName, Object propertyValue )
    {
        /* do nothing */
    }

    private void initIncludes()
    {
        includes.clear();
        includes.addAll( filetypes.getFileTypePatterns( FileTypes.ARTIFACTS ) );
    }
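
    /**
     * Plexus lifecycle hook: register this consumer as a configuration change listener
     * and load the initial include patterns.
     */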
    public void initialize()
        throws InitializationException
    {
        configuration.addChangeListener( this );
        initIncludes();
    }
}
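
// A minimal sketch of how a repository scanner might drive this consumer's lifecycle.
// The scanner loop and the pathsMatching(...) helper are hypothetical illustrations;
// only the consumer callbacks used below are defined in this class:
//
//     ArtifactUpdateDatabaseConsumer consumer = ...;   // looked up from the Plexus container
//     consumer.beginScan( repositoryConfiguration );
//     for ( String path : pathsMatching( consumer.getIncludes() ) )
//     {
//         consumer.processFile( path );
//     }
//     consumer.completeScan();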