/*
* Fabric3
* Copyright © 2008 Metaform Systems Limited
*
* This proprietary software may be used only in connection with the Fabric3 license
* (the "License"), a copy of which is included in the software or may be
* obtained at: http://www.metaformsystems.com/licenses/license.html.
* Software distributed under the License is distributed on an "as is" basis,
* without warranties or conditions of any kind. See the License for the
* specific language governing permissions and limitations of use of the software.
* This software is distributed in conjunction with other software licensed under
* different terms. See the separate licenses for those programs included in the
* distribution for the permitted and restricted uses of such software.
*
*/
package org.fabric3.scanner.impl;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import org.osoa.sca.annotations.Destroy;
import org.osoa.sca.annotations.EagerInit;
import org.osoa.sca.annotations.Init;
import org.osoa.sca.annotations.Property;
import org.osoa.sca.annotations.Reference;
import org.osoa.sca.annotations.Service;
import org.fabric3.api.annotation.Monitor;
import org.fabric3.host.contribution.ContributionException;
import org.fabric3.host.contribution.ContributionService;
import org.fabric3.host.contribution.ContributionSource;
import org.fabric3.host.contribution.Deployable;
import org.fabric3.host.contribution.FileContributionSource;
import org.fabric3.host.contribution.ValidationException;
import org.fabric3.host.domain.AssemblyException;
import org.fabric3.host.domain.DeploymentException;
import org.fabric3.host.domain.Domain;
import org.fabric3.host.domain.UndeploymentException;
import org.fabric3.host.runtime.HostInfo;
import org.fabric3.scanner.spi.FileSystemResource;
import org.fabric3.scanner.spi.FileSystemResourceFactoryRegistry;
import org.fabric3.spi.services.VoidService;
import org.fabric3.spi.services.event.DomainRecover;
import org.fabric3.spi.services.event.EventService;
import org.fabric3.spi.services.event.Fabric3Event;
import org.fabric3.spi.services.event.Fabric3EventListener;
import org.fabric3.spi.services.event.RuntimeStart;
/**
* Periodically scans a directory for new, updated, or removed contributions. New contributions are added to the domain and any deployable components
* are activated. Updated contributions trigger re-activation of their previously deployed components. Removing a contribution removes it from the
* domain and de-activates any associated deployed components.
*
* The scanner watches the deployment directory at a fixed-delay interval. Files are tracked as a {@link FileSystemResource}, which provides a
* consistent metadata view across various types such as jars and exploded directories. Unknown file types are ignored. At the specified interval,
* removed files are determined by comparing the current directory contents with the contents from the previous pass. Changes or additions are also
* determined by comparing the current directory state with that of the previous pass. Detected changes and additions are cached for the following
* interval. Detected changes and additions from the previous interval are then checked using a checksum to see if they have changed again. If so,
* they remain cached. If they have not changed, they are processed, contributed via the ContributionService, and deployed in the domain.
*
* The scanner also participates in recovery on a controller in a distributed domain and in a single-VM runtime. The scanner listens for a
* DomainRecover event and initiates a recovery operation against the Domain service for all contributions present in the deployment directory.
*/
@Service(VoidService.class)
@EagerInit
public class ContributionDirectoryScanner implements Runnable, Fabric3EventListener {
private final Map<String, FileSystemResource> cache = new HashMap<String, FileSystemResource>();
private final Map<String, FileSystemResource> errorCache = new HashMap<String, FileSystemResource>();
private final ContributionService contributionService;
private final EventService eventService;
private final ScannerMonitor monitor;
private final Domain domain;
private Map<String, URI> processed = new HashMap<String, URI>();
private FileSystemResourceFactoryRegistry registry;
private File path;
private long delay = 2000;
private ScheduledExecutorService executor;
public ContributionDirectoryScanner(@Reference FileSystemResourceFactoryRegistry registry,
@Reference ContributionService contributionService,
@Reference(name = "assembly") Domain domain,
@Reference EventService eventService,
@Reference HostInfo info,
@Monitor ScannerMonitor monitor) {
this.registry = registry;
this.contributionService = contributionService;
this.domain = domain;
this.eventService = eventService;
path = new File(info.getBaseDir(), "deploy");
this.monitor = monitor;
}
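/**
 * Sets the directory to scan, overriding the default <code>deploy</code> directory under the runtime base directory.
 *
 * @param dir the directory to scan
 */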
@Property(required = false)
public void setPath(String dir) {
this.path = new File(dir);
}
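/**
 * Sets the delay in milliseconds between directory scans.
 *
 * @param delay the scan delay in milliseconds
 */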
@Property(required = false)
public void setDelay(long delay) {
this.delay = delay;
}
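/**
 * Subscribes the scanner to runtime events so that recovery is performed when the domain recovers and the scanner thread is started once the
 * runtime has started.
 */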
@SuppressWarnings({"unchecked"})
@Init
public void init() {
eventService.subscribe(DomainRecover.class, this);
// register to be notified when the runtime starts so the scanner thread can be initialized
eventService.subscribe(RuntimeStart.class, this);
}
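/**
 * Shuts down the scanner thread when the component is stopped.
 */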
@Destroy
public void destroy() {
if (executor != null) {
executor.shutdownNow();
}
}
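/**
 * Handles runtime events: performs recovery of existing contributions when the domain recovers, and schedules the scanner thread once the runtime
 * has started.
 *
 * @param event the runtime event
 */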
public void onEvent(Fabric3Event event) {
if (event instanceof DomainRecover) {
// process existing files in recovery mode
File[] files = path.listFiles();
recover(files);
} else if (event instanceof RuntimeStart) {
executor = Executors.newSingleThreadScheduledExecutor();
executor.scheduleWithFixedDelay(this, 10, delay, TimeUnit.MILLISECONDS);
}
}
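/**
 * Performs a single scan pass: removals are processed first, then new and updated files.
 */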
public synchronized void run() {
if (!path.isDirectory()) {
// there is no deployment directory, return without processing
return;
}
try {
File[] files = path.listFiles();
processRemovals(files);
processFiles(files);
} catch (RuntimeException e) {
monitor.error(e);
}
}
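/**
 * Processes the deployment directory contents in recovery mode. Files that previously errored and have not changed are skipped; all other files
 * are contributed and recovered in the domain rather than deployed as new additions.
 *
 * @param files the current contents of the deployment directory
 */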
private synchronized void recover(File[] files) {
try {
List<File> contributions = new ArrayList<File>();
for (File file : files) {
String name = file.getName();
FileSystemResource resource = null;
FileSystemResource cached = errorCache.get(name);
if (cached != null) {
resource = registry.createResource(file);
assert resource != null;
resource.reset();
if (Arrays.equals(cached.getChecksum(), resource.getChecksum())) {
// corrupt file from a previous run, continue
continue;
} else {
// file has changed since the error was reported, retry
errorCache.remove(name);
}
}
cached = cache.get(name);
if (cached == null) {
// the file has been added
if (resource == null) {
resource = registry.createResource(file);
}
if (resource == null) {
// not a known type, ignore
continue;
}
resource.reset();
// cache the resource and wait until the next run to see if it has changed
cache.put(name, resource);
contributions.add(file);
}
}
processAdditions(contributions, true);
} catch (IOException e) {
monitor.error(e);
}
}
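/**
 * Tracks new and changed files. A file detected for the first time, or one whose contents are still changing, is cached and held until a
 * subsequent pass shows it has stabilized; only then are additions and updates processed.
 *
 * @param files the current contents of the deployment directory
 */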
private synchronized void processFiles(File[] files) {
boolean wait = false;
List<File> ignored = new ArrayList<File>();
for (File file : files) {
try {
String name = file.getName();
FileSystemResource resource = null;
FileSystemResource cached = errorCache.get(name);
if (cached != null) {
resource = registry.createResource(file);
assert resource != null;
resource.reset();
if (Arrays.equals(cached.getChecksum(), resource.getChecksum())) {
// corrupt file from a previous run, continue
continue;
} else {
// file has changed since the error was reported, retry
errorCache.remove(name);
}
}
cached = cache.get(name);
if (cached == null) {
// the file has been added
if (resource == null) {
resource = registry.createResource(file);
}
if (resource == null) {
// not a known type, ignore
ignored.add(file);
continue;
}
resource.reset();
// cache the resource and wait until the next run to see if it has changed
cache.put(name, resource);
wait = true;
} else {
// already cached from a previous run
if (cached.isChanged()) {
// contents are still being updated, wait until next run
wait = true;
}
}
} catch (IOException e) {
monitor.error(e);
}
}
if (!wait) {
sortAndProcessChanges(files, ignored);
}
}
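/**
 * Separates stabilized files into updates (previously processed) and additions (not yet processed) and dispatches them for processing.
 *
 * @param files   the current contents of the deployment directory
 * @param ignored files of unknown types that should be skipped
 */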
private void sortAndProcessChanges(File[] files, List<File> ignored) {
try {
List<File> updates = new ArrayList<File>();
List<File> additions = new ArrayList<File>();
for (File file : files) {
// check if it is in the store
String name = file.getName();
boolean isProcessed = processed.containsKey(name);
boolean isError = errorCache.containsKey(name);
if (!isError && isProcessed && !ignored.contains(file)) {
// updated
updates.add(file);
} else if (!isError && !isProcessed && !ignored.contains(file)) {
// an addition
additions.add(file);
}
}
processUpdates(updates);
processAdditions(additions, false);
} catch (IOException e) {
monitor.error(e);
}
}
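/**
 * Updates contributions whose backing files have changed since they were last contributed.
 *
 * @param files the changed files
 * @throws IOException if an error occurs reading a file
 */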
private synchronized void processUpdates(List<File> files) throws IOException {
for (File file : files) {
String name = file.getName();
URI artifactUri = processed.get(name);
URL location = file.toURI().normalize().toURL();
FileSystemResource cached = cache.remove(name);
byte[] checksum = cached.getChecksum();
long timestamp = file.lastModified();
long previousTimestamp = contributionService.getContributionTimestamp(artifactUri);
if (timestamp > previousTimestamp) {
try {
ContributionSource source = new FileContributionSource(artifactUri, location, timestamp, checksum);
contributionService.update(source);
monitor.updated(artifactUri.toString());
} catch (ContributionException e) {
errorCache.put(name, cached);
monitor.error(e);
}
}
// TODO undeploy and redeploy
}
}
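/**
 * Contributes new files to the domain. In recovery mode the domain is asked to recover the contributions; otherwise their deployables are included
 * in the domain. Files that fail validation, contribution, or deployment are placed in the error cache.
 *
 * @param files   the added files
 * @param recover true if the scanner is operating in recovery mode
 * @throws IOException if an error occurs reading a file
 */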
private synchronized void processAdditions(List<File> files, boolean recover) throws IOException {
List<ContributionSource> sources = new ArrayList<ContributionSource>();
List<FileSystemResource> addedResources = new ArrayList<FileSystemResource>();
for (File file : files) {
String name = file.getName();
FileSystemResource cached = cache.remove(name);
addedResources.add(cached);
URL location = file.toURI().normalize().toURL();
byte[] checksum = cached.getChecksum();
long timestamp = file.lastModified();
try {
ContributionSource source = new FileContributionSource(URI.create(name), location, timestamp, checksum);
sources.add(source);
} catch (NoClassDefFoundError e) {
errorCache.put(name, cached);
monitor.error(e);
}
}
if (!sources.isEmpty()) {
try {
// install contributions, which will be ordered transitively by import dependencies
List<URI> addedUris = contributionService.contribute(sources);
// activate the contributions by including deployables in a synthesized composite. This will ensure components are started according
// to dependencies even if a dependent component is defined in a different contribution.
if (recover) {
domain.recover(addedUris);
} else {
domain.include(addedUris, false);
}
for (URI uri : addedUris) {
String name = uri.toString();
// URI is the file name
processed.put(name, uri);
monitor.deployed(name);
}
} catch (ValidationException e) {
// print out the validation errors
monitor.contributionErrors(e.getMessage());
// FIXME for now, just error all additions
for (FileSystemResource cached : addedResources) {
errorCache.put(cached.getName(), cached);
}
} catch (AssemblyException e) {
// print out the deployment errors
monitor.deploymentErrors(e.getMessage());
// FIXME for now, just error all additions
for (FileSystemResource cached : addedResources) {
errorCache.put(cached.getName(), cached);
}
} catch (ContributionException e) {
// FIXME for now, just error all additions
for (FileSystemResource cached : addedResources) {
errorCache.put(cached.getName(), cached);
}
monitor.error(e);
} catch (DeploymentException e) {
// FIXME for now, just error all additions
for (FileSystemResource cached : addedResources) {
errorCache.put(cached.getName(), cached);
}
monitor.error(e);
} catch (RuntimeException e) {
// FIXME for now, just error all additions
for (FileSystemResource cached : addedResources) {
errorCache.put(cached.getName(), cached);
}
monitor.error(e);
throw e;
}
}
}
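/**
 * Undeploys and removes contributions whose backing files have been deleted from the deployment directory.
 *
 * @param files the current contents of the deployment directory
 */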
private synchronized void processRemovals(File[] files) {
Map<String, File> index = new HashMap<String, File>(files.length);
for (File file : files) {
index.put(file.getName(), file);
}
List<String> removed = new ArrayList<String>();
for (Map.Entry<String, URI> entry : processed.entrySet()) {
String filename = entry.getKey();
URI uri = entry.getValue();
if (index.get(filename) == null) {
// artifact was removed
try {
// check that the resource was not deleted by another process
if (contributionService.exists(uri)) {
List<Deployable> deployables = contributionService.getDeployables(uri);
for (Deployable deployable : deployables) {
domain.undeploy(deployable.getName());
}
contributionService.uninstall(uri);
contributionService.remove(uri);
}
removed.add(filename);
monitor.removed(filename);
} catch (ContributionException e) {
monitor.removalError(filename, e);
} catch (UndeploymentException e) {
monitor.removalError(filename, e);
}
}
}
for (String removedName : removed) {
processed.remove(removedName);
}
}
}