/*
* Copyright 2021-2024 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.metaeffekt.artifact.analysis.spdxbom;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.metaeffekt.artifact.analysis.spdxbom.config.AssembledConfig;
import com.metaeffekt.artifact.analysis.spdxbom.config.ConfigAssembler;
import com.metaeffekt.artifact.analysis.spdxbom.context.SpdxDocumentContext;
import com.metaeffekt.artifact.analysis.spdxbom.facade.SpdxApiFacade;
import com.metaeffekt.artifact.analysis.spdxbom.facade.SpdxJsonFilter;
import com.metaeffekt.artifact.analysis.spdxbom.hierarchy.HierarchyGraph;
import com.metaeffekt.artifact.analysis.spdxbom.hierarchy.HierarchyGraphEdge;
import com.metaeffekt.artifact.analysis.spdxbom.hierarchy.HierarchyGraphNode;
import com.metaeffekt.artifact.analysis.spdxbom.mapper.ArtifactMapper;
import com.metaeffekt.artifact.analysis.spdxbom.mapper.exception.NoApplicableMapperException;
import com.metaeffekt.artifact.terms.model.LicenseTextEntry;
import com.metaeffekt.artifact.terms.model.LicenseTextProvider;
import com.metaeffekt.artifact.terms.model.NormalizationMetaData;
import com.metaeffekt.artifact.terms.model.TermsMetaData;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.metaeffekt.core.inventory.processor.model.Artifact;
import org.metaeffekt.core.inventory.processor.model.AssetMetaData;
import org.metaeffekt.core.inventory.processor.model.Constants;
import org.metaeffekt.core.inventory.processor.model.Inventory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.spdx.jacksonstore.MultiFormatStore;
import org.spdx.library.InvalidSPDXAnalysisException;
import org.spdx.library.Read;
import org.spdx.library.SpdxConstants;
import org.spdx.library.model.*;
import org.spdx.library.model.enumerations.AnnotationType;
import org.spdx.library.model.enumerations.ChecksumAlgorithm;
import org.spdx.library.model.enumerations.ReferenceCategory;
import org.spdx.library.model.enumerations.RelationshipType;
import org.spdx.library.model.license.AnyLicenseInfo;
import org.spdx.library.model.license.ExtractedLicenseInfo;
import org.spdx.storage.IModelStore;
import org.spdx.storage.simple.InMemSpdxStore;
import java.io.File;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.StandardOpenOption;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* This class is responsible for exporting any given input inventory into an SPDX document of any
* supported type. This is currently limited to the .json format.
* To do this, the class relies heavily on the spdx java library to adhere to the SPDX standard, as well as on a custom
* graph structure to accurately map the relationships between different assets and artifacts.
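*
* <p>A minimal usage sketch (assuming a populated {@link Inventory}, a {@link NormalizationMetaData}
* instance, a {@link LicenseTextProvider} and an {@link SpdxDocumentSpec} are at hand; the attribute
* sets shown are purely illustrative):
* <pre>{@code
* InventorySpdxExporter exporter = new InventorySpdxExporter(
*         Collections.singleton("Comment"),   // approved for the key-value annotation
*         Collections.singleton("Checksum"),  // ignored entirely
*         normalizationMetaData,
*         licenseTextProvider);
* exporter.toMultiFormatStore(inventory, new File("out/bom.spdx.json"), spdxDocumentSpec);
* }</pre>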
*/
public class InventorySpdxExporter {
public static final Logger LOG = LoggerFactory.getLogger(InventorySpdxExporter.class);
private static final String unspecificIdComment =
"This license identifier is marked \"unspecific\" and does not uniquely match a license of a specific version or variant.\n" +
"A license text can therefore not be asserted. Further analysis is required to determine the exact license.";
private static final String expressionIdComment =
"This license identifier is marked \"expression\" and could not be fully resolved.\n" +
"Otherwise you'd likely see a LicenseRef-X WITH LicenseRef-Y with separate texts here.\n" +
"Spdx 2.4 doesn't support LicenseRef for exceptions.\n" +
"Therefore we couldn't construct the proper \"WITH\" and created a combined LicenseRef.\n" +
"Since an expression is a construct of multiple texts, a concrete text could not be displayed.";
/**
* NormalizationMetaData, required for license string translation.
*/
private final NormalizationMetaData normalizationMetaData;
/**
* Provider of license texts, used to append license texts that spdx doesn't know about.
*/
private final LicenseTextProvider licenseTextProvider;
/**
* These attributes are approved for inclusion by ways of the key-value map in an annotation.
* No mapping is required for these, as the key-value map is considered a sufficiently good representation.
*/
private final Set<String> keyValueApprovedAttributes;
/**
* These attributes won't be included in the key-value map, and we ignore them entirely if need be.
* No error will be thrown even if they are not represented at all.
*/
private final Set<String> attributeIgnoreInclusionList;
private final LicenseStringConverter licenseStringConverter;
public InventorySpdxExporter(final Collection<String> keyValueApprovedAttributes,
final Collection<String> attributeIgnoreInclusionList,
final NormalizationMetaData normalizationMetaData,
final LicenseTextProvider licenseTextProvider) {
this.keyValueApprovedAttributes = keyValueApprovedAttributes == null ?
new HashSet<>() :
new HashSet<>(keyValueApprovedAttributes);
this.attributeIgnoreInclusionList = attributeIgnoreInclusionList == null ?
new HashSet<>() :
new HashSet<>(attributeIgnoreInclusionList);
this.normalizationMetaData = Objects.requireNonNull(normalizationMetaData);
this.licenseStringConverter = new LicenseStringConverter(normalizationMetaData, null);
this.licenseTextProvider = licenseTextProvider;
}
protected void ensureParentDirExists(File someFile) {
try {
if (FileUtils.createParentDirectories(someFile) == null) {
throw new IllegalArgumentException("Could not create parent directories for this file.");
}
} catch (IOException e) {
throw new RuntimeException(e);
}
}
public void postProcessJson(File file) throws IOException {
SpdxJsonFilter.filterJson(file, file);
}
/**
* Wraps the export method with a MultiFormatStore from spdx's libraries.
*
* @param inventory the artifact inventory to be exported
* @param outFile the file that the exported document will be written to
* @param spdxDocumentSpec an export spec with document metadata
*/
public void toMultiFormatStore(Inventory inventory, File outFile, SpdxDocumentSpec spdxDocumentSpec) {
// check prerequisites
ensureParentDirExists(Objects.requireNonNull(outFile));
try (final IModelStore baseStore = new InMemSpdxStore()) {
try (final MultiFormatStore modelStore = new MultiFormatStore(baseStore, spdxDocumentSpec.getFormat())) {
exportToSpdxDocument(inventory, spdxDocumentSpec, modelStore);
try (final OutputStream outputStream = Files.newOutputStream(outFile.toPath(),
StandardOpenOption.CREATE,
StandardOpenOption.TRUNCATE_EXISTING)) {
modelStore.serialize(spdxDocumentSpec.getDocumentUri(), outputStream);
}
}
} catch (Exception e) {
// this catch-all rethrow is required since the store's close() method throws "Exception"
throw new RuntimeException(e);
}
try {
postProcessJson(outFile);
} catch (IOException e) {
// failed to process
throw new RuntimeException(e);
}
}
/**
* Gets asset packages that contain a certain artifact.
*
* @param assetIdToAssetPackage a map of asset ids to their asset packages
* @param artifact the artifact to search for
* @return returns a set of asset packages that this artifact is included in
*/
protected Set<SpdxPackage> getAssetPackagesContaining(Map<String, SpdxPackage> assetIdToAssetPackage, Artifact artifact) {
final Set assetPackages = new LinkedHashSet<>();
for (Map.Entry<String, SpdxPackage> entry : assetIdToAssetPackage.entrySet()) {
// relationships are recorded with a cross tab of artifact -> assetId
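// illustrative inventory layout (column and id names are made up): each artifact row carries one
// column per asset id, holding "x" when the artifact is part of that asset:
//   Id         | CID-appliance | AID-installer
//   libfoo-1.2 | x             |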
String lookup = artifact.get(entry.getKey());
if (lookup != null && !lookup.isEmpty()) {
if ("x".equals(lookup)) {
assetPackages.add(entry.getValue());
} else {
LOG.warn("Invalid value [{}] in asset lookup for artifact [{}]. Expected: empty field or 'x'.",
lookup, artifact.getId());
}
}
}
return assetPackages;
}
/**
* Uses an asset from our inventory to generate a special package.
* {@link #exportToSpdxDocument} uses these packages to create relationships
* to the packages used by the asset.
*
* @param spdxDocumentContext The document context.
* @param assetMetaData Data used for asset package creation.
* @param keyValueApprovedAttributes Attributes approved for inclusion via the key-value annotation.
* @param attributeIgnoreInclusionList Attributes excluded from the key-value annotation entirely.
* @param allUnmappedAttributes Collector for attributes that were neither mapped nor approved.
* @return Returns the SpdxPackage for this asset.
* @throws InvalidSPDXAnalysisException Throws if package creation fails.
*/
protected SpdxPackage getAssetPackage(
SpdxDocumentContext spdxDocumentContext,
AssetMetaData assetMetaData,
final Set<String> keyValueApprovedAttributes,
final Set<String> attributeIgnoreInclusionList,
Set<String> allUnmappedAttributes
)
throws InvalidSPDXAnalysisException {
Set<String> written = new HashSet<>();
SpdxDocument document = spdxDocumentContext.getSpdxDocument();
String assetId = assetMetaData.get(AssetMetaData.Attribute.ASSET_ID.getKey());
// create a package for this asset; initially containing NOASSERTION values
final AnyLicenseInfo noAssertionLicenseInfo = SpdxApiFacade.parseLicenseString(
SpdxConstants.NOASSERTION_VALUE,
spdxDocumentContext
);
SpdxPackage.SpdxPackageBuilder assetPackageBuilder = document.createPackage(
document.getModelStore().getNextId(IModelStore.IdType.SpdxId, document.getDocumentUri()),
assetId, noAssertionLicenseInfo, SpdxConstants.NOASSERTION_VALUE, noAssertionLicenseInfo
)
.setFilesAnalyzed(false)
.setDownloadLocation(SpdxConstants.NOASSERTION_VALUE);
written.add(AssetMetaData.Attribute.ASSET_ID.getKey());
String digest = assetMetaData.get("Digest");
// TODO: varied hash support and conversion to spdx (as of writing, only sha256 will work)
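// expected form, judging by the check below: "sha256:<64 hex digits>", e.g.
// "sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855" (hash of the empty string)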
if (StringUtils.isNotBlank(digest) && digest.startsWith("sha256:")) {
final Checksum sha256sum = document.createChecksum(ChecksumAlgorithm.SHA256, digest.substring(7));
final Collection<Checksum> checksums = new ArrayList<>();
checksums.add(sha256sum);
assetPackageBuilder.setChecksums(checksums);
written.add("Digest");
}
if (StringUtils.isNotBlank(assetMetaData.get("Image Id"))) {
assetPackageBuilder.setVersionInfo(assetMetaData.get("Image Id"));
// do NOT consider this written since the version <-> Image Id relationship may not be clear to everyone
}
if (StringUtils.isNotBlank(assetMetaData.get("Supplier"))) {
assetPackageBuilder.setSupplier("Organization: " + assetMetaData.get("Supplier"));
written.add("Supplier");
}
if (StringUtils.isNotBlank(assetMetaData.get("Repository"))) {
assetPackageBuilder.addExternalRef(
document.createExternalRef(
ReferenceCategory.OTHER,
new ReferenceType(SpdxConstants.SPDX_LISTED_REFERENCE_TYPES_PREFIX + "url"),
assetMetaData.get("Repository"),
null
)
);
written.add("Repository");
}
SpdxPackage assetPackage = assetPackageBuilder.build();
assetPackage.setVersionInfo(assetMetaData.get(AssetMetaData.Attribute.VERSION));
written.add(AssetMetaData.Attribute.VERSION.getKey());
Map<String, String> overrides = new HashMap<>();
for (String key : assetMetaData.getAttributes()) {
if (!written.contains(key) && !attributeIgnoreInclusionList.contains(key)) {
overrides.put(key, assetMetaData.get(key));
if (!keyValueApprovedAttributes.contains(key)) {
allUnmappedAttributes.add(key);
}
}
}
// register attribute values in annotation
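// the leftover attributes end up as a pretty-printed JSON object in the annotation comment,
// e.g. (illustrative): { "Type" : "container", "Notes" : "..." }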
ObjectMapper objectMapper = new ObjectMapper();
JsonNode jsonNode = objectMapper.valueToTree(overrides);
try {
String jsonString = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(jsonNode);
assetPackage.setAnnotations(Collections.singletonList(document.createAnnotation(
"Tool: " + spdxDocumentContext.getSpdxDocumentSpec().getTool(),
AnnotationType.OTHER,
new SimpleDateFormat(SpdxConstants.SPDX_DATE_FORMAT).format(new Date()),
jsonString
)));
} catch (JsonProcessingException e) {
throw new RuntimeException(e);
}
return assetPackage;
}
private TermsMetaData findTmdByCanonicalName(String canonicalName) {
TermsMetaData tmd = normalizationMetaData.getTermsMetaData(canonicalName);
if (tmd == null) {
// anticipate that the license name changed; in this case the history should resolve it
// TODO: move resolving using the history to the license string converter in earlier processing. it should
// then hand through the actual TMD instead of us bodging stuff here, after the fact.
// this would mean having to create a guarantee for up-to-date canonicalNames for this method.
tmd = normalizationMetaData.findUsingCanonicalNameInHistory(canonicalName);
}
return tmd;
}
/**
* Tries to add text for referenced licenses to the spdx document (license texts for licenserefs).
*
* @param spdxDocumentContext The SpdxDocumentContext in which the operation is performed.
* @param referencedLicenses the list of license names that need to have a text appended
*/
// TODO: rework this method. a lot of its edge cases should be simplified either now or soon.
// e.g. no more fake tmd objects, separate handling of unknown "license names" etc.
protected void addReferencedLicenses(SpdxDocumentContext spdxDocumentContext, Collection<TermsMetaData> referencedLicenses)
throws InvalidSPDXAnalysisException {
// early-abort if there is nothing to be resolved
if (referencedLicenses.isEmpty()) return;
final SpdxDocument spdxDocument = spdxDocumentContext.getSpdxDocument();
// make a deduplicated list of license names that need a text
final Set<String> referencedCanonicalNames = referencedLicenses.stream()
.map(TermsMetaData::getCanonicalName)
.collect(Collectors.toSet());
final Map<String, LicenseTextEntry> licenseTextEntryByName;
if (licenseTextProvider != null) {
licenseTextEntryByName = licenseTextProvider.resolve(referencedCanonicalNames);
} else {
licenseTextEntryByName = Collections.emptyMap();
}
// first process those for which texts were found
for (Map.Entry<String, LicenseTextEntry> licenseNameToTextEntry : licenseTextEntryByName.entrySet()) {
final String canonicalName = licenseNameToTextEntry.getKey();
final LicenseTextEntry licenseTextEntry = licenseNameToTextEntry.getValue();
TermsMetaData tmd = findTmdByCanonicalName(canonicalName);
if (tmd == null) {
LOG.warn(
"Passed name [{}] is not in NormalizationMetaData. Good database entry needed. Skipping.",
canonicalName
);
continue;
}
final String derivedIdentifier = licenseStringConverter.getLicenseRefForTmd(tmd);
// sanity check: derivedIdentifier should be a lone licenseRef, not a resolved set or expression.
// if it is not any more, we need to implement some more splitting right here.
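// e.g. "LicenseRef-Apache-License-2.0" would pass; a resolved set such as
// "LicenseRef-A AND LicenseRef-B" would trip this check (names illustrative)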
if (derivedIdentifier.contains(" ") || !derivedIdentifier.startsWith("LicenseRef")) {
LOG.error("Sanity check failed: derivedIdentifier was not a lone licenseRef. May cause errors.");
}
final ExtractedLicenseInfo extractedLicenseInfo;
LOG.info("Adding text for [{}/{}].", canonicalName, derivedIdentifier);
extractedLicenseInfo = SpdxApiFacade.createExtractedLicenseInfo(
derivedIdentifier,
licenseTextEntry.getLicenseText(),
spdxDocumentContext
);
// add this object to the document
spdxDocument.addExtractedLicenseInfos(extractedLicenseInfo);
}
// process unspecific licenses: no license text can be found for these
for (String canonicalName : referencedCanonicalNames) {
// resolve tmd
TermsMetaData tmd = normalizationMetaData.findTermsMetaData(canonicalName);
if (tmd == null) {
LOG.warn(
"Passed name [{}] is not in NormalizationMetaData. Good database entry needed. Skipping.",
canonicalName
);
continue;
}
if (!tmd.isUnspecific()) {
// not unspecific. can't do anything for this. either it had a license text or it's some error
continue;
}
// do not add notice if we DID find some preset text or notice already
if (licenseTextEntryByName.containsKey(canonicalName)) {
LOG.warn(
"Unspecific license [{}] has a text? That's against the spec that this method had in mind.",
canonicalName
);
LOG.warn("Not adding dummy license notice for unspecific license [{}].", canonicalName);
continue;
}
String derivedName = licenseStringConverter.getLicenseRefForTmd(tmd);
LOG.info("Adding placeholder text for [{}/{}].", canonicalName, derivedName);
ExtractedLicenseInfo extractedLicenseInfo = SpdxApiFacade.createExtractedLicenseInfo(
derivedName,
SpdxConstants.NOASSERTION_VALUE,
spdxDocumentContext
);
extractedLicenseInfo.setComment(unspecificIdComment);
// add this object to the document
spdxDocument.addExtractedLicenseInfos(extractedLicenseInfo);
}
// process expressions: they may or may not have a text. Most do not as they are a helper construct of tmd.
// these need separate processing since spdx-2.4 doesn't have LicenseRef for exceptions.
for (String canonicalName : referencedCanonicalNames) {
// resolve tmd
TermsMetaData tmd = normalizationMetaData.findTermsMetaData(canonicalName);
if (tmd == null) {
LOG.warn(
"Passed name [{}] is not in NormalizationMetaData. Good database entry needed. Skipping.",
canonicalName
);
continue;
}
if (!tmd.isExpression()) {
// not an expression. can't do anything for this. either it had a license text or it's some error
continue;
}
// do not add notice if we DID find some preset text or notice already
if (licenseTextEntryByName.containsKey(canonicalName)) {
LOG.warn(
"Unspecific license [{}] has a text? That's against the spec that this method had in mind.",
canonicalName
);
LOG.warn("Not adding dummy license notice for unspecific license [{}].", canonicalName);
continue;
}
String derivedName = licenseStringConverter.getLicenseRefForTmd(tmd);
LOG.info("Adding placeholder text for [{}/{}].", canonicalName, derivedName);
ExtractedLicenseInfo extractedLicenseInfo = SpdxApiFacade.createExtractedLicenseInfo(
derivedName,
SpdxConstants.NOASSERTION_VALUE,
spdxDocumentContext
);
extractedLicenseInfo.setComment(expressionIdComment);
// add this object to the document
spdxDocument.addExtractedLicenseInfos(extractedLicenseInfo);
}
}
/**
* Exports an inventory to spdx.
*
* @param inventory the inventory to export
* @param exportSpec contains metadata for document creation
* @param modelStore a backing store for spdx to write into
* @throws InvalidSPDXAnalysisException throws on issues with document creation (filling data etc)
*/
// TODO: maybe this method should not just log, but also return a list of issues?
protected void exportToSpdxDocument(Inventory inventory, SpdxDocumentSpec exportSpec, IModelStore modelStore) throws InvalidSPDXAnalysisException {
final SpdxDocumentContext spdxDocumentContext = new SpdxDocumentContext(exportSpec, modelStore);
// assemble configs
final MapperRunner mapperRunner = getMapperRunner();
// collect unmapped attributes for later
Set<String> allUnmappedAttributes = new HashSet<>();
// create a map to set asset packages as parents of artifact packages
final Map<String, SpdxPackage> assetIdToAssetPackage = new LinkedHashMap<>();
// graph structure to map artifact and asset relationships
HierarchyGraph hierarchyGraph = mapRelationshipHierarchy(inventory, spdxDocumentContext.getSpdxDocument());
for (AssetMetaData assetMetaData : inventory.getAssetMetaData()) {
// TODO: build an asset tree. then later we won't add all assets to documentdescribes any more.
// something about looking up assets in the list of artifacts? (missing examples)
// FIXME: review with JKR; when are assets labeled "Primary"?
String primaryAsset = assetMetaData.get("Primary");
if (StringUtils.isNotBlank(primaryAsset)) {
// add all asset packages to the document
assetIdToAssetPackage.put(
assetMetaData.get(AssetMetaData.Attribute.ASSET_ID.getKey()),
getAssetPackage(
spdxDocumentContext,
assetMetaData,
keyValueApprovedAttributes,
attributeIgnoreInclusionList,
allUnmappedAttributes
)
);
}
}
// we'll need the document a couple of times, so let's store a reference
final SpdxDocument document = spdxDocumentContext.getSpdxDocument();
for (SpdxPackage assetPackage : assetIdToAssetPackage.values()) {
// FIXME: we need to build an asset tree and only add the root elements
// FIXME: also the case when no asset is provided needs to be considered
// add this asset to the list of stuff that this document "describes"
document.getDocumentDescribes().add(assetPackage);
}
final Set<TermsMetaData> referencedLicenses = new HashSet<>();
for (Artifact artifact : inventory.getArtifacts()) {
// first map an artifact to an spdx item
ArtifactMapper.Result mapperRunnerResult;
try {
mapperRunnerResult = mapperRunner.map(artifact, spdxDocumentContext);
} catch (NoApplicableMapperException e) {
throw new RuntimeException(e.getMessage(), e);
}
// collect attributes present in artifact that were not written
final HashSet<String> missing = new HashSet<>(artifact.getAttributes());
missing.removeAll(mapperRunnerResult.getAttributesWritten());
missing.removeAll(attributeIgnoreInclusionList);
// ignore attribute keys with "empty" values. for example some fillers only run conditionally
missing.removeIf((k) -> StringUtils.isBlank(artifact.get(k)));
allUnmappedAttributes.addAll(missing);
// collect a list of referenced licenses
referencedLicenses.addAll(mapperRunnerResult.getReferencedLicenses());
}
Stream<SpdxPackage> allPackages = Read.getAllPackages(document.getModelStore(), document.getDocumentUri());
List<SpdxPackage> packageList = allPackages.collect(Collectors.toList());
List<HierarchyGraphEdge> describeRelationships = hierarchyGraph.getDescribeRelationships();
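// wire up relationships: match each SPDX package to its graph node by name and materialize the
// outgoing edges as SPDX relationships (package names are assumed to identify nodes uniquely)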
for (SpdxPackage spdxPackage : packageList) {
Optional<String> optionalName = spdxPackage.getName();
String nodeName = optionalName.orElse(null);
List<HierarchyGraphEdge> relationshipList = hierarchyGraph.getRelationship(nodeName);
if (nodeName != null && !relationshipList.isEmpty()) {
for (HierarchyGraphEdge edge : relationshipList) {
String toNode = edge.getToNode().getId();
Relationship relationship = document.createRelationship(SpdxApiFacade.getSpdxElementByName(packageList,
toNode), edge.getRelationshipType(), null);
spdxPackage.addRelationship(relationship);
}
}
for (HierarchyGraphEdge relationship : describeRelationships) {
if (relationship.getToNode().getId().equals(nodeName) && relationship.getRelationshipType() == RelationshipType.DESCRIBES) {
Relationship describeRelationship =
document.createRelationship(SpdxApiFacade.getSpdxElementByName(packageList, nodeName), relationship.getRelationshipType(), null);
document.addRelationship(describeRelationship);
}
}
}
// try to add text for all referenced (unknown by spdx) licenses
addReferencedLicenses(spdxDocumentContext, referencedLicenses);
// run our own validation check for written attributes, ignoring those in attributeIgnoreList
if (!allUnmappedAttributes.isEmpty()) {
LOG.warn("Got [{}] unmapped attributes while exporting", allUnmappedAttributes.size());
for (String unmapped : allUnmappedAttributes) {
LOG.warn(" - " + unmapped);
}
}
// run document verification and print warnings if present
final Map<String, Long> warningsDeduplicated = new TreeMap<>();
List<String> warnings = document.verify();
for (String warn : warnings) {
Long val = warningsDeduplicated.getOrDefault(warn, 0L) + 1L;
warningsDeduplicated.put(warn, val);
}
if (!warningsDeduplicated.isEmpty()) {
LOG.warn("Got [{}] ([{}]) warning(s) from spdx document verification: warn (number of times):",
warningsDeduplicated.size(), warnings.size());
for (Map.Entry<String, Long> warn : warningsDeduplicated.entrySet()) {
if (warn.getValue() > 1) {
LOG.warn(" - {} ([{}] times)", warn.getKey(), warn.getValue());
} else {
LOG.warn(" - {}", warn.getKey());
}
}
}
}
/**
* Maps all the relationships provided in the input inventory onto a graph structure.
*
* @param inventory the source inventory in which to map all relationships
* @param document a reference to the final document
* @return returns a HierarchyGraph with all relationships mapped within
*/
public HierarchyGraph mapRelationshipHierarchy(Inventory inventory, SpdxDocument document) {
// custom graph structure to map different SPDX relationships between artifacts and assets
HierarchyGraph hierarchyGraph = new HierarchyGraph();
// maps asset ids to artifact ids (where present) so that relationships can be assigned accurately
Map<String, String> assetIdToArtifactId = new HashMap<>();
List<String> assetIds = new ArrayList<>();
hierarchyGraph.addNode(new HierarchyGraphNode(document.getId(), null, HierarchyGraphNode.NODE_TYPE.DOCUMENT));
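// resulting shape (illustrative): DOCUMENT --DESCRIBES--> primary asset --CONTAINS--> artifacts;
// artifacts without any asset relation are attached to the document directly via DESCRIBES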
// loops through all assets and artifacts, maps asset ids to their correlating artifact ids, and creates all nodes for
// further assignment of relationships
for (AssetMetaData metaData : inventory.getAssetMetaData()) {
String assetId = metaData.get(AssetMetaData.Attribute.ASSET_ID);
assetIds.add(assetId);
if (StringUtils.isNotBlank(metaData.get("Primary"))) {
hierarchyGraph.addNode(new HierarchyGraphNode(assetId, null, HierarchyGraphNode.NODE_TYPE.PRIMARY));
} else {
for (Artifact artifact : inventory.getArtifacts()) {
hierarchyGraph.addNode(new HierarchyGraphNode(artifact.getId(), artifact, HierarchyGraphNode.NODE_TYPE.ARTIFACT));
if (artifact.get(assetId) != null && artifact.get(assetId).equals(Constants.MARKER_CROSS)) {
assetIdToArtifactId.put(assetId, artifact.get(Artifact.Attribute.ID));
}
}
}
}
// Creates the relationships between all nodes
for (Artifact artifact : inventory.getArtifacts()) {
boolean relationshipFound = false;
for (String assetId : assetIds) {
if (artifact.get(assetId) != null && artifact.get(assetId).equals(Constants.MARKER_CONTAINS)) {
hierarchyGraph.addRelationship(assetIdToArtifactId.get(assetId), artifact.get(Artifact.Attribute.ID), RelationshipType.CONTAINS);
relationshipFound = true;
}
}
if (!relationshipFound) {
hierarchyGraph.addRelationship(document.getId(), artifact.get(Artifact.Attribute.ID),
RelationshipType.DESCRIBES);
}
if (hierarchyGraph.primaryId != null) {
hierarchyGraph.addRelationship(hierarchyGraph.primaryId, artifact.get(Artifact.Attribute.ID), RelationshipType.CONTAINS);
}
}
if (hierarchyGraph.primaryId != null) {
hierarchyGraph.addRelationship(document.getId(), hierarchyGraph.primaryId, RelationshipType.DESCRIBES);
}
return hierarchyGraph;
}
private MapperRunner getMapperRunner() {
ConfigAssembler configAssembler =
new ConfigAssembler(
Collections.emptyMap(),
keyValueApprovedAttributes,
normalizationMetaData,
ConfigAssembler.defaultMappers
);
List<AssembledConfig> assembledConfigs = configAssembler.loadDefaultConfigs();
// TODO: load custom configs once there is a way to write / inject them
// create mapperRunner with configs
return new MapperRunner(assembledConfigs);
}
}