org.apache.karaf.features.internal.region.SubsystemResolver Maven / Gradle / Ivy
Go to download
Show more of this group Show more artifacts with this name
Show all versions of org.apache.karaf.features.core Show documentation
Show all versions of org.apache.karaf.features.core Show documentation
This bundle is the core implementation of the Karaf features support.
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.karaf.features.internal.region;
import java.io.Writer;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import org.apache.felix.utils.collections.DictionaryAsMap;
import org.apache.felix.utils.resource.CapabilityImpl;
import org.apache.felix.utils.resource.CapabilitySet;
import org.apache.felix.utils.resource.RequirementImpl;
import org.apache.felix.utils.resource.ResourceBuilder;
import org.apache.felix.utils.resource.ResourceImpl;
import org.apache.felix.utils.resource.ResourceUtils;
import org.apache.felix.utils.resource.SimpleFilter;
import org.apache.karaf.features.BundleInfo;
import org.apache.karaf.features.Feature;
import org.apache.karaf.features.FeaturesService;
import org.apache.karaf.features.internal.download.DownloadManager;
import org.apache.karaf.features.internal.download.Downloader;
import org.apache.karaf.features.internal.download.StreamProvider;
import org.apache.karaf.features.internal.resolver.ResolverUtil;
import org.apache.karaf.util.json.JsonWriter;
import org.eclipse.equinox.internal.region.StandardRegionDigraph;
import org.eclipse.equinox.region.Region;
import org.eclipse.equinox.region.RegionDigraph;
import org.eclipse.equinox.region.RegionFilterBuilder;
import org.osgi.framework.BundleException;
import org.osgi.framework.InvalidSyntaxException;
import org.osgi.framework.Version;
import org.osgi.framework.wiring.BundleRevision;
import org.osgi.namespace.service.ServiceNamespace;
import org.osgi.resource.Capability;
import org.osgi.resource.Requirement;
import org.osgi.resource.Resource;
import org.osgi.resource.Wire;
import org.osgi.service.repository.Repository;
import org.osgi.service.resolver.Resolver;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.karaf.features.internal.resolver.ResourceUtils.TYPE_FEATURE;
import static org.apache.karaf.features.internal.resolver.ResourceUtils.TYPE_SUBSYSTEM;
import static org.apache.karaf.features.internal.util.MapUtils.invert;
import static org.osgi.framework.Constants.PROVIDE_CAPABILITY;
import static org.osgi.framework.namespace.ExecutionEnvironmentNamespace.EXECUTION_ENVIRONMENT_NAMESPACE;
import static org.osgi.framework.namespace.IdentityNamespace.CAPABILITY_TYPE_ATTRIBUTE;
import static org.osgi.framework.namespace.IdentityNamespace.IDENTITY_NAMESPACE;
import static org.osgi.framework.namespace.IdentityNamespace.TYPE_BUNDLE;
import static org.osgi.framework.namespace.IdentityNamespace.TYPE_FRAGMENT;
public class SubsystemResolver implements SubsystemResolverResolution, SubsystemResolverResult {
private static final Logger LOGGER = LoggerFactory.getLogger(SubsystemResolver.class);
private DownloadManager manager;
private Resolver resolver;
private RegionDigraph digraph;
private Subsystem root;
private Map> wiring;
// Cached computed results
private ResourceImpl environmentResource;
private Map flatSubsystemsMap;
private Map> bundlesPerRegions;
private Map bundles;
private Map> featuresPerRegions;
private Map features;
private RegionDigraph flatDigraph;
private Map> bundleInfos;
private SubsystemResolverCallback callback;
public SubsystemResolver(Resolver resolver, DownloadManager manager) {
this.resolver = resolver;
this.manager = manager;
}
    /**
     * Registers an optional callback that is passed down to the bundle download phase
     * (see {@code resolve()}); may be left unset.
     *
     * @param callback the callback to notify, or null for none
     */
    public void setDeployCallback(SubsystemResolverCallback callback) {
        this.callback = callback;
    }
@Override
public void prepare(
Map> allFeatures,
Map> requirements,
Map> system
) throws Exception {
// #1. Build subsystems on the fly
// - regions use hierarchical names with root region called "root" and child regions named "root/child",
// "root/child/grandchild", etc.
// - there can be only one root region and even if equinox Regions can be configured as digraph, only tree
// structure is used
// - each region will have corresponding Subsystem created and (being an OSGi Resource), will _require_
// related requirements. Each region's subsystem will also _require_ all child subsystems
for (Map.Entry> entry : requirements.entrySet()) {
String[] parts = entry.getKey().split("/");
if (root == null) {
root = new Subsystem(parts[0]);
} else if (!root.getName().equals(parts[0])) {
throw new IllegalArgumentException("Can not use multiple roots: " + root.getName() + ", " + parts[0]);
}
Subsystem ss = root;
for (int i = 1; i < parts.length; i++) {
String childName = String.join("/", Arrays.copyOfRange(parts, 0, i + 1));
ss = getOrCreateChild(ss, childName, parts[i]);
}
for (String requirement : entry.getValue()) {
// #1a. each "[feature:]*" and "requirement:*" requirements are added directly as resource requirements:
// - feature: ns=osgi.identity, 'osgi.identity=f1; type=karaf.feature; filter:="(&(osgi.identity=f1)(type=karaf.feature))"'
// - requirement: as-is
// - bundle: added only as downloadable bundle - used only by assembly builder
ss.require(requirement);
}
}
if (root == null) {
return;
}
// #2. Pre-resolve
// - for each region's subsystem X, feature requirements are changed into child subsystems of X
// - for each feature, any dependant features (/) will become non-mandatory (why?)
// child subsystem of the same region's subsystem as original feature
// - for each feature, any conditional (/) will become mandatory (why?)
// child subsystem of the original feature's subsystem
root.build(allFeatures);
// #3. Add system resources
// - from all unmanaged bundles we'll gather Provide-Capability headers' clauses in "osgi.service" namespace
// and Export-Service headers
// - these capabilities will be added to "dummy" Resource added as o.a.k.features.internal.region.Subsystem.installable
BundleRevision sysBundleRev = null;
boolean hasEeCap = false;
for (Map.Entry> entry : system.entrySet()) {
Subsystem ss = null;
String[] parts = entry.getKey().split("/");
String path = parts[0];
if (path.equals(root.getName())) {
ss = root;
}
for (int i = 1; ss != null && i < parts.length; i++) {
path += "/" + parts[i];
ss = ss.getChild(path);
}
if (ss != null) {
ResourceImpl dummy = new ResourceImpl("dummy", "dummy", Version.emptyVersion);
for (BundleRevision res : entry.getValue()) {
// We need to explicitely provide service capabilities for bundles
// We use both actual services and services declared from the headers
// TODO: use actual services
Map headers = new DictionaryAsMap<>(res.getBundle().getHeaders());
Resource tmp = ResourceBuilder.build(res.getBundle().getLocation(), headers);
for (Capability cap : tmp.getCapabilities(ServiceNamespace.SERVICE_NAMESPACE)) {
dummy.addCapability(new CapabilityImpl(dummy, cap.getNamespace(), cap.getDirectives(), cap.getAttributes()));
}
ss.addSystemResource(res);
for (Capability cap : res.getCapabilities(null)) {
hasEeCap |= cap.getNamespace().equals(EXECUTION_ENVIRONMENT_NAMESPACE);
}
if (res.getBundle().getBundleId() == 0) {
sysBundleRev = res;
}
}
ss.addSystemResource(dummy);
}
}
// Under Equinox, the osgi.ee capabilities are not provided by the system bundle
if (!hasEeCap && sysBundleRev != null) {
String provideCaps = sysBundleRev.getBundle().getHeaders().get(PROVIDE_CAPABILITY);
environmentResource = new ResourceImpl("environment", "karaf.environment", Version.emptyVersion);
environmentResource.addCapabilities(ResourceBuilder.parseCapability(environmentResource, provideCaps));
root.addSystemResource(environmentResource);
}
}
@Override
public Set collectPrerequisites() {
if (root != null) {
return root.collectPrerequisites();
}
return new HashSet<>();
}
@Override
public Map> resolve(
String featureResolutionRange,
FeaturesService.ServiceRequirementsBehavior serviceRequirements,
final Repository globalRepository,
String outputFile) throws Exception {
if (root == null) {
return Collections.emptyMap();
}
// Download bundles
root.downloadBundles(manager, featureResolutionRange, serviceRequirements, new RepositoryManager(), callback);
// Populate digraph and resolve
digraph = new StandardRegionDigraph(null, null);
populateDigraph(digraph, root);
Downloader downloader = manager.createDownloader();
SubsystemResolveContext context = new SubsystemResolveContext(root, digraph, globalRepository, downloader, serviceRequirements);
if (outputFile != null) {
Map json = new HashMap<>();
if (globalRepository != null) {
json.put("globalRepository", toJson(globalRepository));
}
json.put("repository", toJson(context.getRepository()));
try {
// this is where the magic happens...
wiring = resolver.resolve(context);
json.put("success", "true");
json.put("wiring", toJson(wiring));
} catch (Exception e) {
json.put("success", "false");
json.put("exception", e.toString());
throw e;
} finally {
try (Writer writer = Files.newBufferedWriter(
Paths.get(outputFile),
StandardCharsets.UTF_8,
StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING)) {
JsonWriter.write(writer, json, true);
}
}
} else {
// this is where the magic happens...
wiring = resolver.resolve(context);
}
downloader.await();
// Remove wiring to the fake environment resource
if (environmentResource != null) {
for (List wires : wiring.values()) {
wires.removeIf(wire -> wire.getProvider() == environmentResource);
}
}
// Fragments are always wired to their host only, so create fake wiring to
// the subsystem the host is wired to
associateFragments();
return wiring;
}
private static Object toJson(Map> wiring) {
Map>> wires = new HashMap<>();
for (Map.Entry> reswiring : wiring.entrySet()) {
Resource resource = reswiring.getKey();
String id = toString(resource);
List