/*
* Copyright 2021-2024 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.metaeffekt.artifact.analysis.diffmerge;
import com.metaeffekt.artifact.analysis.utils.TimeUtils;
import com.metaeffekt.artifact.analysis.vulnerability.enrichment.InventoryAttribute;
import com.metaeffekt.artifact.analysis.vulnerability.enrichment.vulnerabilitystatus.VulnerabilityStatus;
import com.metaeffekt.artifact.analysis.vulnerability.enrichment.vulnerabilitystatus.VulnerabilityStatusReviewedEntry;
import com.metaeffekt.mirror.contents.advisory.AdvisoryEntry;
import com.metaeffekt.mirror.contents.base.VulnerabilityContextInventory;
import com.metaeffekt.mirror.contents.vulnerability.Vulnerability;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import org.metaeffekt.core.inventory.processor.model.*;
import org.metaeffekt.core.inventory.processor.reader.InventoryReader;
import java.io.File;
import java.io.IOException;
import java.util.*;
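/**
 * Merges artifacts, assets, vulnerabilities and security advisories from a set of reference inventories into a
 * single output inventory. Reference inventories are registered together with a context string that is recorded
 * on merged artifacts.
 * <p>
 * A minimal usage sketch; the file names and the selection and order of the include steps are illustrative only
 * and depend on the calling workflow:
 * <pre>{@code
 * InventoryMerger merger = new InventoryMerger();
 * merger.addReferenceInventory(new File("scan-a.xls"));
 * merger.addReferenceInventory(new File("scan-b.xls"));
 * merger.includeArtifacts();
 * merger.includeAssets();
 * merger.includeVulnerabilities();
 * Inventory merged = merger.getOutputInventory();
 * }</pre>
 */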
@Getter
@Slf4j
public class InventoryMerger {
private static final String[] MERGE_VULNERABILITY_STATUS_ORDER_DESCENDING =
{VulnerabilityMetaData.STATUS_VALUE_APPLICABLE, VulnerabilityMetaData.STATUS_VALUE_IN_REVIEW,
VulnerabilityMetaData.STATUS_VALUE_INSIGNIFICANT, VulnerabilityMetaData.STATUS_VALUE_NOTAPPLICABLE,
VulnerabilityMetaData.STATUS_VALUE_VOID};
private final Inventory outputInventory;
private final Map<Inventory, String> referenceInventories = new LinkedHashMap<>();
public InventoryMerger() {
this.outputInventory = new Inventory();
}
public InventoryMerger(Inventory outputInventory) {
this.outputInventory = outputInventory;
}
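/**
 * Creates a merger whose output inventory is read from the given file. If the file does not exist, an empty
 * output inventory is used instead.
 *
 * @param file the inventory file to read; must not be <code>null</code>.
 * @throws IOException if reading the inventory file fails.
 */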
public InventoryMerger(File file) throws IOException {
if (file == null) {
throw new IllegalArgumentException("Inventory file must not be null");
}
if (!file.exists()) {
outputInventory = new Inventory();
} else {
outputInventory = new InventoryReader().readInventory(file);
}
}
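/**
 * Registers a reference inventory to be merged into the output inventory.
 *
 * @param inventory the reference inventory.
 * @param context   a context label recorded on merged artifacts (see {@link InventoryAttribute#INVENTORY_CONTEXT}).
 */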
public void addReferenceInventory(Inventory inventory, String context) {
referenceInventories.put(inventory, context);
}
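/**
 * Reads a reference inventory from the given file and registers it, using the file name (without the
 * <code>.xls</code> extension) as context.
 *
 * @param file the inventory file to read.
 * @throws IOException if reading the inventory file fails.
 */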
public void addReferenceInventory(File file) throws IOException {
final Inventory inventory = new InventoryReader().readInventory(file);
addReferenceInventory(inventory, file.getName().replace(".xls", ""));
}
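/**
 * Merges the artifacts of all reference inventories into the output inventory. Artifacts that are not yet
 * present are added; for artifacts that already exist, the inventory context is appended and the vulnerability
 * and CPE related attributes are merged as unique, comma-separated values.
 */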
public void includeArtifacts() {
log.info("Merging artifacts from [{}] reference inventories", referenceInventories.size());
for (Map.Entry<Inventory, String> inventoryContext : referenceInventories.entrySet()) {
Inventory inventory = inventoryContext.getKey();
String context = inventoryContext.getValue();
for (Artifact artifact : inventory.getArtifacts()) {
Artifact outputArtifact = outputInventory.findArtifact(artifact, true);
if (outputArtifact == null) {
appendContext(artifact, context);
outputInventory.getArtifacts().add(artifact);
} else {
appendContext(outputArtifact, context);
appendVulnerabilityData(outputArtifact, artifact);
}
}
}
}
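/**
 * Merges the asset metadata of all reference inventories into the output inventory. Assets are matched by
 * their asset id; only assets that are not yet present in the output inventory are added.
 */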
public void includeAssets() {
log.info("Merging Assets from [{}] reference inventories.", referenceInventories.size());
for (Map.Entry<Inventory, String> inventoryContext : referenceInventories.entrySet()) {
Inventory inventory = inventoryContext.getKey();
for (AssetMetaData assetMetaData : inventory.getAssetMetaData()) {
AssetMetaData outputAsset = outputInventory.findAssetMetaData(assetMetaData.get(Constants.KEY_ASSET_ID), false);
if (outputAsset == null) {
outputInventory.getAssetMetaData().add(assetMetaData);
}
}
}
}
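/**
 * Merges the vulnerabilities of all reference inventories into the output inventory.
 * <ul>
 *     <li>If every occurrence of a vulnerability carries an assessed status, the occurrence with the most
 *         severe latest status is used as template; see {@link #findMostSevereStatusVulnerability(Collection)}.</li>
 *     <li>Otherwise the vulnerability is appended with its status history entries cleared.</li>
 *     <li>Security advisories referenced by the vulnerabilities are copied over.</li>
 *     <li>Reviewed advisories are only retained if they have been reviewed in every reference inventory that
 *         contains the vulnerability; partially reviewed advisories are marked as unreviewed.</li>
 * </ul>
 */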
public void includeVulnerabilities() {
final TimeMeasure timeMeasure = new TimeMeasure();
log.info("Merging vulnerabilities from [{}] reference inventories", referenceInventories.size());
final VulnerabilityContextInventory vOutputInventory =
VulnerabilityContextInventory.fromInventory(this.outputInventory);
final Map<Inventory, VulnerabilityContextInventory> vulnerabilityContextInventories = new LinkedHashMap<>();
for (final Map.Entry<Inventory, String> inventoryContext : referenceInventories.entrySet()) {
final Inventory inventory = inventoryContext.getKey();
final VulnerabilityContextInventory vInventory = VulnerabilityContextInventory.fromInventory(inventory);
vulnerabilityContextInventories.put(inventory, vInventory);
}
log.info(" [{}] 1. Parsed [{}] vulnerabilities from [{}] reference inventories", timeMeasure,
vulnerabilityContextInventories.values().stream().mapToInt(i -> i.getVulnerabilities().size()).sum(),
vulnerabilityContextInventories.size());
// (vulnerability id -> vulnerability) carrying the most severe latest status history entry
final Map<String, Vulnerability> mostSevereStatusVulnerabilities = new LinkedHashMap<>();
for (Map.Entry<Inventory, VulnerabilityContextInventory> contextInventoryEntry :
vulnerabilityContextInventories.entrySet()) {
final VulnerabilityContextInventory vCurrentInventory = contextInventoryEntry.getValue();
for (Vulnerability currentVulnerability : vCurrentInventory.getVulnerabilities()) {
if (mostSevereStatusVulnerabilities.get(currentVulnerability.getId()) == null) {
mostSevereStatusVulnerabilities.put(currentVulnerability.getId(), currentVulnerability);
} else {
final Vulnerability mostSevere = mostSevereStatusVulnerabilities.get(currentVulnerability.getId());
final Vulnerability merged =
this.findMostSevereStatusVulnerability(Arrays.asList(mostSevere, currentVulnerability));
mostSevereStatusVulnerabilities.put(currentVulnerability.getId(), merged);
}
}
}
log.info(" [{}] 2. Found most severe status for [{}] vulnerabilities", timeMeasure,
mostSevereStatusVulnerabilities.size());
// create vulnerabilities using those with the highest severity as a template
for (Vulnerability currentVulnerability : mostSevereStatusVulnerabilities.values()) {
if (currentVulnerability == null) {
continue;
}
vOutputInventory.findOrAppendVulnerabilityByVulnerability(
Vulnerability.fromJson(currentVulnerability.toJson()));
}
log.info(" [{}] 3. Created [{}] vulnerabilities with most severe status information in output inventory",
timeMeasure, vOutputInventory.getVulnerabilities().size());
// append all other vulnerabilities
for (Map.Entry<Inventory, VulnerabilityContextInventory> contextInventoryEntry :
vulnerabilityContextInventories.entrySet()) {
final VulnerabilityContextInventory vCurrentInventory = contextInventoryEntry.getValue();
for (Vulnerability currentVulnerability : vCurrentInventory.getVulnerabilities()) {
if (mostSevereStatusVulnerabilities.get(currentVulnerability.getId()) == null) {
final Vulnerability vulnerability = vOutputInventory.findOrAppendVulnerabilityByVulnerability(
Vulnerability.fromJson(currentVulnerability.toJson()));
if (vulnerability.getVulnerabilityStatus() != null) {
vulnerability.getVulnerabilityStatus().clearHistoryEntries();
}
}
}
}
log.info(" [{}] 4. Appended remaining vulnerabilities to output inventory, now at [{}]", timeMeasure,
vOutputInventory.getVulnerabilities().size());
// from now on, pause re-association to avoid unnecessary overhead.
// the result is the same as if re-association was enabled.
vOutputInventory.pauseReAssociation();
// vOutputInventory.resumeReAssociation(); would be the way to re-enable re-association
// copy all security advisories
for (Vulnerability outputVulnerability : vOutputInventory.getVulnerabilities()) {
for (VulnerabilityContextInventory currentInventory : vulnerabilityContextInventories.values()) {
final Optional<Vulnerability> currentVulnerability =
currentInventory.findVulnerabilityByName(outputVulnerability.getId());
if (currentVulnerability.isPresent()) {
final Vulnerability vulnerability = currentVulnerability.get();
for (AdvisoryEntry advisoryEntry : vulnerability.getSecurityAdvisories()) {
final AdvisoryEntry mergedAdvisoryEntry =
vOutputInventory.findOrAppendAdvisoryEntryByAdvisoryEntry(advisoryEntry);
outputVulnerability.addSecurityAdvisory(mergedAdvisoryEntry);
}
}
}
}
log.info(" [{}] 5. Appended security advisories to output inventory, now at [{}]", timeMeasure,
vOutputInventory.getSecurityAdvisories().size());
// find the advisories that have been reviewed
// (vulnerability id -> (inventory -> set of reviewed advisory ids))
final Map<String, Map<VulnerabilityContextInventory, Set<String>>> reviewedAdvisories = new LinkedHashMap<>();
for (Map.Entry<Inventory, VulnerabilityContextInventory> contextInventoryEntry :
vulnerabilityContextInventories.entrySet()) {
final VulnerabilityContextInventory vCurrentInventory = contextInventoryEntry.getValue();
for (Vulnerability currentVulnerability : vCurrentInventory.getVulnerabilities()) {
final VulnerabilityStatus currentStatus = currentVulnerability.getOrCreateNewVulnerabilityStatus();
final List<VulnerabilityStatusReviewedEntry> currentReviewedAdvisories =
currentStatus.getReviewedAdvisories();
final Set<String> reviewedAdvisoriesPerInventory =
reviewedAdvisories.computeIfAbsent(currentVulnerability.getId(), k -> new LinkedHashMap<>())
.computeIfAbsent(vCurrentInventory, k -> new LinkedHashSet<>());
for (VulnerabilityStatusReviewedEntry reviewedAdvisory : currentReviewedAdvisories) {
reviewedAdvisoriesPerInventory.add(reviewedAdvisory.getId());
}
}
}
log.info(" [{}] 6. Found [{}] reviewed advisories in [{}] reference inventories", timeMeasure,
reviewedAdvisories.values().stream().mapToInt(i -> i.values().stream().mapToInt(Set::size).sum()).sum(),
reviewedAdvisories.size());
// remove all reviewed advisories that are not present in all inventories
for (Map<VulnerabilityContextInventory, Set<String>> inventoryReviewedAdvisories :
reviewedAdvisories.values()) {
final Set<String> advisoryIds = new HashSet<>();
for (Set<String> reviewedAdvisoriesPerInventory : inventoryReviewedAdvisories.values()) {
advisoryIds.addAll(reviewedAdvisoriesPerInventory);
}
// remove all advisory IDs that are not present in all inventories
advisoryIds.removeIf(e -> {
for (Set<String> reviewedAdvisoriesPerInventory : inventoryReviewedAdvisories.values()) {
if (!reviewedAdvisoriesPerInventory.contains(e)) {
log.info("Marking partially reviewed security advisory as unreviewed [{}]", e);
return true;
}
}
return false;
});
// write the first occurrence of each advisory ID
for (Set<String> reviewedAdvisoriesPerInventory : inventoryReviewedAdvisories.values()) {
reviewedAdvisoriesPerInventory.removeIf(e -> {
if (advisoryIds.contains(e)) {
advisoryIds.remove(e);
return false;
} else {
return true;
}
});
}
}
log.info(" [{}] 7. Removed partially reviewed advisories, now at [{}] reviewed advisories", timeMeasure,
reviewedAdvisories.values().stream().mapToInt(i -> i.values().stream().mapToInt(Set::size).sum())
.sum());
// (vulnerability id -> set of reviewed advisory ids)
final Map<String, Set<String>> applicableReviewedAdvisories = new LinkedHashMap<>();
for (Map.Entry<String, Map<VulnerabilityContextInventory, Set<String>>> reviewedAdvisoriesPerVulnerability :
reviewedAdvisories.entrySet()) {
final String vulnerabilityId = reviewedAdvisoriesPerVulnerability.getKey();
for (Map.Entry<VulnerabilityContextInventory, Set<String>> reviewedAdvisoriesPerInventory :
reviewedAdvisoriesPerVulnerability.getValue().entrySet()) {
for (String reviewedAdvisory : reviewedAdvisoriesPerInventory.getValue()) {
applicableReviewedAdvisories.computeIfAbsent(vulnerabilityId, k -> new LinkedHashSet<>())
.add(reviewedAdvisory);
}
}
}
log.info(" [{}] 8. Found [{}] applicable reviewed advisories", timeMeasure,
applicableReviewedAdvisories.values().stream().mapToInt(Set::size).sum());
// write the effective status back to the vulnerability
for (Map.Entry<String, Set<String>> reviewedAdvisoriesPerVulnerability :
applicableReviewedAdvisories.entrySet()) {
final Vulnerability outputVulnerability =
vOutputInventory.findOrCreateVulnerabilityByName(reviewedAdvisoriesPerVulnerability.getKey());
final VulnerabilityStatus outputStatus = outputVulnerability.getOrCreateNewVulnerabilityStatus();
for (String advisory : reviewedAdvisoriesPerVulnerability.getValue()) {
outputStatus.addReviewedAdvisoryEntry(advisory);
}
}
log.info(" [{}] 9. Wrote effective status back to vulnerabilities", timeMeasure);
vOutputInventory.writeBack(true);
log.info(" [{}] 10. Wrote output inventory back to file", timeMeasure);
}
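/**
 * Merges the security advisories of all reference inventories into the output inventory. Advisories are
 * deduplicated by their id; each unique advisory is appended once.
 */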
public void includeAdvisories() {
final TimeMeasure timeMeasure = new TimeMeasure();
log.info("Merging advisories from [{}] reference inventories", referenceInventories.size());
final VulnerabilityContextInventory vOutputInventory =
VulnerabilityContextInventory.fromInventory(this.outputInventory);
final Map<Inventory, VulnerabilityContextInventory> vulnerabilityContextInventories = new LinkedHashMap<>();
for (final Map.Entry<Inventory, String> inventoryContext : referenceInventories.entrySet()) {
final Inventory inventory = inventoryContext.getKey();
final VulnerabilityContextInventory vInventory = VulnerabilityContextInventory.fromInventory(inventory);
vulnerabilityContextInventories.put(inventory, vInventory);
}
log.info(" [{}] 1. Parsed [{}] advisories from [{}] reference inventories", timeMeasure,
vulnerabilityContextInventories.values().stream().mapToInt(i -> i.getSecurityAdvisories().size()).sum(),
vulnerabilityContextInventories.size());
// simply copy over all advisories once
final Map<String, AdvisoryEntry> entries = new LinkedHashMap<>();
for (Map.Entry<Inventory, VulnerabilityContextInventory> contextInventoryEntry :
vulnerabilityContextInventories.entrySet()) {
final VulnerabilityContextInventory vCurrentInventory = contextInventoryEntry.getValue();
for (AdvisoryEntry advisoryEntry : vCurrentInventory.getSecurityAdvisories()) {
entries.put(advisoryEntry.getId(), advisoryEntry);
}
}
log.info(" [{}] 2. Found [{}] unique advisories", timeMeasure, entries.size());
for (AdvisoryEntry advisoryEntry : entries.values()) {
vOutputInventory.findOrAppendAdvisoryEntryByAdvisoryEntry(advisoryEntry);
}
log.info(" [{}] 3. Appended [{}] unique advisories to output inventory", timeMeasure, entries.size());
vOutputInventory.pauseReAssociation();
vOutputInventory.writeBack(true);
log.info(" [{}] 4. Wrote output inventory back to file", timeMeasure);
}
/**
 * Merges the statuses provided by the vulnerabilities' latest status history entries:
 * <ul>
 *     <li>If no status is passed, <code>null</code> is returned.</li>
 *     <li>If any status is <code>null</code>, <code>null</code> is returned, as one of the inventories
 *         contains a version of the vulnerability that has not yet been assessed.</li>
 *     <li>Otherwise, the most severe of the statuses is returned, according to the order specified in
 *         {@link InventoryMerger#MERGE_VULNERABILITY_STATUS_ORDER_DESCENDING}.</li>
 *     <li>If none of the VMDs contained a valid status entry, the first is returned.</li>
 * </ul>
 *
 * @param vulnerabilities The vulnerabilities from which the most severe status is to be determined.
 * @return The most severe status vulnerability or <code>null</code>, if at least one of them is null or no
 *         status has been passed.
 */
private Vulnerability findMostSevereStatusVulnerability(Collection<Vulnerability> vulnerabilities) {
// not assessed
if (vulnerabilities.isEmpty()) {
return null;
}
if (vulnerabilities.stream().filter(Objects::nonNull).map(Vulnerability::getVulnerabilityStatus)
.anyMatch(status -> status == null || status.getLatestActiveStatusHistoryEntry() == null)) {
return null;
}
// return the most severe status
for (String checkStatus : MERGE_VULNERABILITY_STATUS_ORDER_DESCENDING) {
for (Vulnerability currentVulnerability : vulnerabilities) {
if (currentVulnerability != null && currentVulnerability.getVulnerabilityStatus() != null
&& currentVulnerability.getVulnerabilityStatus().getLatestActiveStatusHistoryEntry() != null) {
final String status =
currentVulnerability.getVulnerabilityStatus().getLatestActiveStatusHistoryEntry()
.getStatus();
if (checkStatus.equalsIgnoreCase(status)) {
return currentVulnerability;
}
}
}
}
// if none of the status fields contained a valid value, the first one is returned
return vulnerabilities.iterator().next();
}
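/**
 * Appends the given context to the artifact's inventory context attribute, creating the attribute if absent.
 */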
private void appendContext(Artifact artifact, String context) {
String combinedContext = artifact.get(InventoryAttribute.INVENTORY_CONTEXT.getKey());
if (combinedContext != null) {
combinedContext += ", " + context;
} else {
combinedContext = context;
}
artifact.set(InventoryAttribute.INVENTORY_CONTEXT.getKey(), combinedContext);
}
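/**
 * Merges the vulnerability and CPE related attributes of the given artifact into the output artifact,
 * keeping each comma-separated value only once.
 */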
private void appendVulnerabilityData(Artifact outputArtifact, Artifact artifact) {
// vulnerability
this.appendCsvValueUnique(outputArtifact, artifact, Artifact.Attribute.VULNERABILITY.getKey());
this.appendCsvValueUnique(outputArtifact, artifact, InventoryAttribute.VULNERABILITIES_FIXED_BY_KB.getKey());
this.appendCsvValueUnique(outputArtifact, artifact, InventoryAttribute.ADDON_CVES.getKey());
this.appendCsvValueUnique(outputArtifact, artifact, InventoryAttribute.INAPPLICABLE_CVE.getKey());
// cpe
this.appendCsvValueUnique(outputArtifact, artifact, InventoryAttribute.MATCHED_CPES.getKey());
this.appendCsvValueUnique(outputArtifact, artifact, InventoryAttribute.DERIVED_CPE_URIS.getKey());
this.appendCsvValueUnique(outputArtifact, artifact, InventoryAttribute.INAPPLICABLE_CPE.getKey());
this.appendCsvValueUnique(outputArtifact, artifact, InventoryAttribute.ADDITIONAL_CPE.getKey());
this.appendCsvValueUnique(outputArtifact, artifact, InventoryAttribute.INITIAL_CPE_URIS.getKey());
}
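/**
 * Appends the comma-separated values of the given attribute from the source artifact to the output artifact,
 * preserving order and removing duplicates. Does nothing if the source artifact has no value for the attribute.
 */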
private void appendCsvValueUnique(Artifact outputArtifact, Artifact artifact, String attribute) {
final String existingValue = outputArtifact.get(attribute);
final String appendValue = artifact.get(attribute);
if (appendValue == null) {
return;
}
final Set<String> values = new LinkedHashSet<>();
if (existingValue != null) {
values.addAll(Arrays.asList(existingValue.split(", ")));
}
values.addAll(Arrays.asList(appendValue.split(", ")));
if (!values.isEmpty()) {
outputArtifact.set(attribute, String.join(", ", values));
} else {
outputArtifact.set(attribute, null);
}
}
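/**
 * Small helper for the step-wise log output above: tracks the time elapsed since construction and since the
 * last measurement; {@link #toString()} renders both durations.
 */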
protected static class TimeMeasure {
private final long start = System.currentTimeMillis();
private long last = start;
public long sinceStart() {
return System.currentTimeMillis() - start;
}
public long sinceLast() {
final long now = System.currentTimeMillis();
final long diff = now - last;
last = now;
return diff;
}
public String sinceStartString() {
return TimeUtils.formatTimeDiff(sinceStart());
}
public String sinceLastString() {
return TimeUtils.formatTimeDiff(sinceLast());
}
@Override
public String toString() {
return String.format("%1$9s -> %2$9s", sinceLastString(), sinceStartString());
}
}
}