/*
* Copyright 2021-2024 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.metaeffekt.artifact.analysis.report;
import com.metaeffekt.artifact.analysis.metascan.Constants;
import com.metaeffekt.artifact.analysis.utils.FileUtils;
import com.metaeffekt.artifact.analysis.utils.InventoryUtils;
import com.metaeffekt.artifact.analysis.utils.StringUtils;
import j2html.tags.specialized.*;
import org.apache.commons.io.filefilter.DirectoryFileFilter;
import org.apache.commons.io.filefilter.TrueFileFilter;
import org.apache.commons.lang3.Validate;
import org.json.JSONArray;
import org.json.JSONObject;
import org.metaeffekt.core.inventory.processor.model.Artifact;
import org.metaeffekt.core.inventory.processor.model.Inventory;
import org.metaeffekt.core.inventory.processor.reader.InventoryReader;
import org.metaeffekt.core.util.ColorScheme;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.*;
import java.util.stream.Collectors;
import static j2html.TagCreator.*;
import static java.lang.String.format;
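/**
* Builds a consolidated license comparison report from a series of scan result inventories.
*
* Minimal usage sketch (exception handling omitted; file locations are illustrative and not prescribed by this class):
* <pre>{@code
* InventoryReportModel model = new InventoryReportModel()
*     .from(new File("target/report"))                           // directory with timestamp-named *.xls result inventories
*     .withReferenceInventory(new File("reference-inventory.xls"))
*     .withAnalysisPath(new File("target/report/.analysis"))     // optional; enables license segmentation details
*     .evaluate();
* model.createHtmlReport(new File("target/report/inventory-report.html"));
* if (model.isReportFailure()) {
*     // at least one artifact produced a high-risk status
* }
* }</pre>
*/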
public class InventoryReportModel {
public final static Logger LOG = LoggerFactory.getLogger(InventoryReportModel.class);
enum Risk {
HIGH,
MEDIUM,
LOW
}
private File reportBaseDir;
private File analysisPath;
private List<ResultInventory> resultInventories = new ArrayList<>();
private final List<String> resultIds = new ArrayList<>();
private Inventory unifiedInventory;
private Inventory referenceInventory;
private boolean reportFailure = false;
public InventoryReportModel() {
}
/**
* Reads all inventories in a given base directory and picks the five most recent ones to evaluate later.
*
* @param reportBaseDir The directory that contains the inventories.
* @return This instance to allow for API-chaining.
* @throws IOException If one of the files could not be read as inventory.
*/
public InventoryReportModel from(File reportBaseDir) throws IOException {
this.reportBaseDir = reportBaseDir;
FileUtils.validateExists(reportBaseDir);
// iterate over inventory files in base directory and read into inventory objects
for (String inventoryFile : FileUtils.scanDirectoryForFiles(reportBaseDir, "*.xls")) {
if (inventoryFile.matches("^[0-9]*\\.xls")) {
resultInventories.add(ResultInventory.fromFile(new File(reportBaseDir, inventoryFile)));
}
}
// manage number of results; limit to the latest five entries (keep files in the file system)
resultInventories = resultInventories.stream()
.sorted()
.skip(Math.max(0, resultInventories.size() - 5))
.collect(Collectors.toList());
return this;
}
public InventoryReportModel withReferenceInventory(File referenceInventoryFile) throws IOException {
return withReferenceInventory(new InventoryReader().readInventory(referenceInventoryFile));
}
public InventoryReportModel withReferenceInventory(Inventory referenceInventory) {
this.referenceInventory = referenceInventory;
return this;
}
/**
* By setting the .analysis directory from the scan, the JSON license segmentation files can be used to add this
* data to the individual artifact entries in the generated report.
*
* @param analysisPath The base .analysis directory.
* @return This instance to allow for API-chaining.
*/
public InventoryReportModel withAnalysisPath(File analysisPath) {
this.analysisPath = analysisPath;
return this;
}
/**
* Collect all result inventories into a unified inventory that contains all relevant data from the result inventories.
*
* @return This instance to allow for API-chaining.
* @throws IOException If one of the paths inside the inventory's "Analysis Path" column is invalid, or
*                     if one of the JSON segmentation files could not be read.
*/
public InventoryReportModel evaluate() throws IOException {
this.unifiedInventory = new Inventory();
// if the analysis path was set, extract the license segmentation json files from the directory
Map<File, JSONObject> segmentations = extractSegmentationFromAnalysisPath();
if (segmentations == null || segmentations.isEmpty()) {
if (analysisPath != null) {
if (!analysisPath.exists()) {
LOG.warn("Analysis path is set to [{}] but the directory does not exist", analysisPath.getAbsolutePath());
} else {
LOG.warn("Analysis path is set to [{}] but no segmentation files were found in the directory", analysisPath.getAbsolutePath());
}
}
}
// iterate over all inventories set by from(File reportBaseDir) and collect their artifacts into a unified
// inventory that contains all artifacts while preserving their relevant data by adding columns.
for (ResultInventory resultInventory : resultInventories) {
final Inventory inventory = resultInventory.getInventory();
addArtifactsToUnifiedInventory(inventory);
for (final Artifact artifact : inventory.getArtifacts()) {
final Artifact unifiedArtifact = findUnifiedArtifact(artifact);
// invariant; all artifacts are now in the unified inventory
Validate.notNull(unifiedArtifact, format("All artifacts must be covered by the unified inventory. [%s] found missing.", artifact.getId()));
final String key = String.valueOf(resultInventory.getTimestamp());
if (!resultIds.contains(key)) {
resultIds.add(key);
}
final String derivedLicense = artifact.get(Constants.KEY_DERIVED_LICENSES);
unifiedArtifact.set(key, derivedLicense);
// add the analysis paths as separate columns per resultInventory
String analysisPath = artifact.get("Analysis Path");
if (analysisPath != null) {
final File analysisDir = new File(analysisPath);
if (analysisDir.exists()) {
analysisPath = FileUtils.asRelativePath(reportBaseDir.getAbsolutePath(), analysisDir.getAbsolutePath());
}
final String filename = artifact.getId().replace("/", "_");
unifiedArtifact.set(key + "-link", analysisPath + "-analysis" + "/" + filename + "_license-segmentation.txt");
}
// add the identified terms as separate columns per resultInventory
String identifiedTerms = artifact.get("Identified Terms");
if (identifiedTerms != null) {
unifiedArtifact.set(key + "-terms", identifiedTerms);
}
// add the segmentation information as separate columns per resultInventory if given
if (segmentations != null) {
JSONObject segmentation = findSegmentation(segmentations, analysisPath, unifiedArtifact);
if (segmentation != null) unifiedArtifact.set(key + "-segmentation", segmentation.toString());
}
}
}
Collections.reverse(resultIds);
// add selected data from reference inventory
for (Artifact artifact : unifiedInventory.getArtifacts()) {
Artifact referenceArtifact = findReferenceArtifact(artifact);
if (referenceArtifact != null) {
artifact.setGroupId(referenceArtifact.getGroupId());
artifact.setVersion(referenceArtifact.getVersion());
artifact.setLicense(referenceArtifact.getLicense());
String derivedLicense = referenceArtifact.get(Constants.KEY_DERIVED_LICENSES);
if (derivedLicense == null) {
derivedLicense = referenceArtifact.getLicense();
}
artifact.set(Constants.KEY_DERIVED_LICENSES, derivedLicense);
}
}
return this;
}
/**
* Scan the entire analysis directory for license segmentation json results and return an array containing the file
* objects and the already parsed JSON files.
*
* @return A map containing all license segmentation files with the parsed JSON objects.
* @throws IOException If one of the files could not be read.
*/
private Map<File, JSONObject> extractSegmentationFromAnalysisPath() throws IOException {
Map<File, JSONObject> segmentations = new HashMap<>();
if (analysisPath != null && analysisPath.exists()) {
List<File> jsonFiles = FileUtils.listFiles(analysisPath, TrueFileFilter.INSTANCE, DirectoryFileFilter.DIRECTORY).stream().sorted().collect(Collectors.toList());
jsonFiles = jsonFiles.stream().filter(f -> f.getName().endsWith("_license-segmentation.json")).collect(Collectors.toList());
for (File segmentationFile : jsonFiles) {
segmentations.put(segmentationFile, new JSONObject(String.join("", FileUtils.readLines(segmentationFile, StandardCharsets.UTF_8))));
}
}
return segmentations.size() == 0 ? null : segmentations;
}
/**
* Finds the JSON segmentation information for an artifact with an optional analysis path (.analysis directory).
* Will first attempt to find a file that contains the analysisPath, then fall back to the artifact id.
*
* @param segmentations The segmentation information, containing the files mapped to their already read JSON objects.
* @param analysisPath The path to the analysis directory, optional.
* @param artifact The artifact whose id is used if the analysisPath is null or was not found in the map.
* @return The segmentation as JSON, or null if none was found.
*/
private JSONObject findSegmentation(Map<File, JSONObject> segmentations, String analysisPath, Artifact artifact) {
if (analysisPath != null) {
final String filename = artifact.getId().replace("/", "_");
JSONObject result = findSegmentation(segmentations, analysisPath.replaceAll("^\\.\\./", "") + "-analysis" + "/" + filename + "_license-segmentation.json");
if (result != null) return result;
}
return findSegmentation(segmentations, artifact.getId());
}
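// Example with illustrative values: for an artifact id "lib/foo.jar" and analysisPath "../scan-1", the primary
// lookup key is "scan-1-analysis/lib_foo.jar_license-segmentation.json"; if no segmentation file path contains
// that key, the plain artifact id is used as the fallback search string (see findSegmentation below).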
/**
* Searches the segmentation map for a given string.
* The first occurrence of the searchString in the file path will be returned, or null if none was found.
*
* @param segmentations The segmentation information, containing the files mapped to their already read JSON objects.
* @param searchString The string to search for.
* @return The segmentation as JSON, or null if none was found.
*/
private JSONObject findSegmentation(Map<File, JSONObject> segmentations, String searchString) {
for (Map.Entry<File, JSONObject> segmentationEntry : segmentations.entrySet()) {
if (segmentationEntry.getKey().getAbsolutePath().contains(searchString)) {
return segmentationEntry.getValue();
}
}
return null;
}
/**
* Will add all artifacts to the unified inventory while preserving relevant existing artifact data.
*
* @param inventory The inventory to merge the artifacts from.
*/
protected void addArtifactsToUnifiedInventory(Inventory inventory) {
for (final Artifact artifact : inventory.getArtifacts()) {
final Artifact referenceArtifact = findReferenceArtifact(artifact);
final Artifact unifiedArtifact = findUnifiedArtifact(artifact);
final Artifact newArtifact = new Artifact();
if (referenceArtifact == null) {
newArtifact.setId(artifact.getId());
// no reference artifact; we keep the checksum unrestricted in the unified inventory
newArtifact.setChecksum(null);
// only add if not already contained
if (unifiedArtifact == null) {
unifiedInventory.getArtifacts().add(newArtifact);
}
} else {
newArtifact.setId(referenceArtifact.getId());
newArtifact.setChecksum(referenceArtifact.getChecksum());
if (unifiedInventory.findArtifact(newArtifact.getId()) == null) {
unifiedInventory.getArtifacts().add(newArtifact);
}
}
}
}
private Artifact findUnifiedArtifact(Artifact artifact) {
// find by id and checksum
Artifact unifiedArtifact = unifiedInventory.findArtifactByIdAndChecksum(artifact.getId(), artifact.getChecksum());
// ... or by id only; if no checksum match was found
if (unifiedArtifact == null) {
unifiedArtifact = unifiedInventory.findArtifact(artifact.getId());
}
return unifiedArtifact;
}
private Artifact findReferenceArtifact(Artifact artifact) {
// find by id and checksum
Artifact referenceArtifact = referenceInventory.findArtifactByIdAndChecksum(artifact.getId(), artifact.getChecksum());
// ... find by id; ensure checksum is null (not to match an artifact with checksum)
if (referenceArtifact == null) {
referenceArtifact = referenceInventory.findArtifactByIdAndChecksum(artifact.getId(), null);
}
// ... find fuzzy to get any reference
if (referenceArtifact == null) {
referenceArtifact = referenceInventory.findArtifact(artifact, true);
}
return referenceArtifact;
}
public void explain() {
LOG.info(toString());
explainUnifiedInventory();
}
protected void explainUnifiedInventory() {
if (unifiedInventory != null) {
for (Artifact artifact : unifiedInventory.getArtifacts()) {
LOG.info("{}<{}>:", artifact.getId(), artifact.getChecksum());
ArrayList<String> orderedAttributes = new ArrayList<>(artifact.getAttributes());
Collections.sort(orderedAttributes);
for (String attribute : orderedAttributes) {
LOG.info(" {} = {}", attribute, artifact.get(attribute));
}
}
}
}
@Override
public String toString() {
return "InventoryReportModel{" +
"resultInventories=" + resultInventories +
'}';
}
/**
* Generates a report that compares the expected to the derived licenses.
*
* @param file The file to write the generated report to.
*/
public void createHtmlReport(File file) {
// find the latest entry if it exists
final String latestKey = resultIds.isEmpty() ? null : resultIds.get(0);
// build the table body by iterating over all artifacts
TbodyTag tableBody = tbody();
for (Artifact artifact : unifiedInventory.getArtifacts()) {
final Status statusDerivedLicenseRef = evaluateDerivedLicenseRef(artifact);
final Status statusLicenseRef = evaluateLicenseRef(artifact);
Set<String> highMessages = new HashSet<>();
Set<String> mediumMessages = new HashSet<>();
Set<String> lowMessages = new HashSet<>();
addMessage(highMessages, mediumMessages, statusDerivedLicenseRef);
addMessage(highMessages, mediumMessages, statusLicenseRef);
// list derived and expected license
TdTag derivedLicensesRefCell = td().withClass("tg-0lax-r" + statusDerivedLicenseRef.risk.name().charAt(0));
Iterator<String> iterator = Arrays.stream(valueOf(artifact.get(Constants.KEY_DERIVED_LICENSES)).split(", ?")).iterator();
while (true) {
derivedLicensesRefCell.withText(iterator.next());
if (iterator.hasNext()) derivedLicensesRefCell.with(hr().withClass("half-linebreak"));
else break;
}
TdTag expectedLicensesRefCell = td().withClass("tg-0lax-r" + statusLicenseRef.risk.name().charAt(0));
iterator = Arrays.stream(valueOf(artifact.getLicense()).split(", ?")).iterator();
while (true) {
expectedLicensesRefCell.withText(iterator.next());
if (iterator.hasNext()) expectedLicensesRefCell.with(hr().withClass("half-linebreak"));
else break;
}
// list all previous iteration's licenses
List dynamicTableColumnElements = new ArrayList<>();
for (final String key : resultIds) {
List<String> licenses = Arrays.stream((StringUtils.isEmpty(artifact.get(key)) || artifact.get(key).equals("") ? "<none>" : artifact.get(key)).split(", ?")).map(String::trim).collect(Collectors.toList());
String filePathLink = artifact.get(key + "-link");
final Set<Status> statusDerivedLicense = evaluateDerivedLicense(artifact, key);
String classDerivedLicense = "tg-0lax-r" + statusDerivedLicense.stream().min(Comparator.comparing(status -> status.risk.ordinal())).map(status -> status.risk.name()).orElse("L").charAt(0);
boolean containsIncompleteMatch = statusDerivedLicense.stream().anyMatch(status -> status.reason.equals("Incomplete Match."));
// only the latest result contributes high/medium messages; older results are recorded as low
if (key.equalsIgnoreCase(latestKey)) {
statusDerivedLicense.forEach(status -> addMessage(highMessages, mediumMessages, status));
} else {
statusDerivedLicense.forEach(status -> addMessage(lowMessages, status));
}
// iterate over all licenses to create references to the metaeffekt universe and, for the
// latest result, the directory path structure for each of them
TdTag cellValue = td().withClasses(classDerivedLicense, "cell-actual-licenses");
for (Iterator<String> iter = licenses.iterator(); iter.hasNext(); ) {
String license = iter.next();
// if the universe link could not be created, add the regular file path link
String metaeffektUniverseUrl = getMetaeffektUniverseUrl(license);
if (metaeffektUniverseUrl != null) {
cellValue.with(a(rawHtml(license)).withHref(metaeffektUniverseUrl).withTarget("aeuniverse"));
} else if (filePathLink != null) {
cellValue.with(a(rawHtml(license)).withHref("file:" + filePathLink).withTarget("filepath"));
} else {
cellValue.with(rawHtml(license));
}
if (key.equalsIgnoreCase(latestKey)) {
// get the segmentation information and build a document tree view of the files contained
if (artifact.get(latestKey + "-segmentation") != null) {
JSONObject segmentation = new JSONObject(artifact.get(latestKey + "-segmentation"));
JSONObject licenseOverview = segmentation.optJSONObject("license.overview");
Set<String> paths = new HashSet<>();
if (licenseOverview != null) {
JSONArray currentLicenseFiles = licenseOverview.optJSONArray(license);
if (currentLicenseFiles != null) {
for (int i = 0; i < currentLicenseFiles.length(); i++) {
String path = currentLicenseFiles.optString(i, null);
if (path == null) continue;
// FIXME: temporary mitigation / eliminate segment for report view
// Resolve with AEAA-251
final int lastSlash = path.lastIndexOf("/");
paths.add(lastSlash > 0 ? path.substring(0, lastSlash) : path);
}
}
}
// split the paths into nodes and build a tree view using those
cellValue.with(buildDocumentTree(PathNode.makeNodes(paths), null));
}
}
// add some linebreaks if there are more licenses to come
if (iter.hasNext()) {
cellValue.with(hr().withClass("half-linebreak"));
}
}
cellValue.with(
iff(containsIncompleteMatch, i(hr().withClass("half-linebreak"), text(" (Incomplete Match)")))
);
dynamicTableColumnElements.add(cellValue);
}
TrTag tableRow = tr(
td().withClass("tg-0lax").with(
b(text(valueOf(artifact.getId()))),
iff(artifact.getChecksum() != null, i(hr().withClass("half-linebreak"), text(artifact.getChecksum())))
),
expectedLicensesRefCell,
derivedLicensesRefCell,
each(dynamicTableColumnElements.stream().map(d -> d))
);
String classStatus = highMessages.isEmpty() ? (mediumMessages.isEmpty() ? "tg-0lax-rL" : "tg-0lax-rM") : "tg-0lax-rH";
Set<String> reasons = highMessages.isEmpty() ? (mediumMessages.isEmpty() ? lowMessages : mediumMessages) : highMessages;
tableRow.with(
td().withClass(classStatus).withText(reasons.stream().sorted().collect(Collectors.joining(", ")))
);
tableBody.with(tableRow);
this.reportFailure |= !highMessages.isEmpty();
}
TrTag tableHeadElements = tr(
th().withClass("tg-0lax").with(b("Artifact Id")),
th().withClass("tg-0lax").with(b("Curated Reference Licenses")),
th().withClass("tg-0lax").with(b("Expected Identified Licenses"))
);
for (String key : resultIds) {
tableHeadElements.with(
th().withClass("tg-0lax").with(
b("Actual Identified Licenses"), br(),
text(dateOf(key)),
iff(key.equalsIgnoreCase(latestKey), join(br(), b("LATEST")))
)
);
}
tableHeadElements.with(th().withClass("tg-0lax").with(b("Status")));
HtmlTag htmlDocument = html().withLang("en").with(
head(
meta().withCharset("UTF-8"),
title("Inventory Report"),
link().withRel("icon")
.withHref("data:image/svg+xml,"),
style(ColorScheme.cssRoot() +
"p{font-family:Arial,sans-serif;font-size:10px;font-weight:400;padding:10px 5px;overflow:hidden;word-break:normal;background-color:#fafafa}" +
".tg thead{position:sticky;top:0}" + // table styles
".tg{box-shadow:0 0 2px 1px #00000087;border-collapse:collapse;border-spacing:0;border-radius:3px}" +
".tg td{font-family:Arial,sans-serif;font-size:14px;padding:10px 10px;overflow:hidden;word-break:normal}" +
".tg th{padding:10px 5px 7px 10px;font-family:Arial,sans-serif;font-size:14px;font-weight:400;overflow:hidden;word-break:normal;background-color:var(--pastel-gray);background-clip:padding-box}" +
".tg .tg-0lax{text-align:left;vertical-align:top}" +
".tg .tg-0lax-rH{text-align:left;vertical-align:top;background-color:var(--pastel-red);}" +
".tg .tg-0lax-rM{text-align:left;vertical-align:top;background-color:var(--pastel-yellow)}" +
".tg .tg-0lax-rL{text-align:left;vertical-align:top;background-color:var(--pastel-green);}" +
//".tg th{border:1px solid #a6a6a6}" +
".tg td{border:1px solid #00000045}" +
".tg tr:first-child th{border-top:0}" +
".tg tr:last-child td{border-bottom:0}" +
".tg tr td:first-child,.tg tr th:first-child{border-left:0}" +
".tg tr td:last-child,.tg tr th:last-child{border-right:0}" +
".tg tr:last-child td:first-child{border-bottom-left-radius:3px}" +
".tg tr:last-child td:last-child{border-bottom-right-radius:3px}" +
".tg tr:first-child th:first-child{border-top-left-radius:3px}" +
".tg tr:first-child th:last-child{border-top-right-radius:3px}" +
".tg tbody tr:hover{background-color:var(--pastel-white);}" +
".tg{width:100%;}" + // table elements correct width
".cell-actual-licenses{max-width:20%;word-wrap:break-word;}" + // table elements correct width
"a:link{text-decoration:underline;color:#000}" + // link style
"a:visited{text-decoration:underline;color:#000}" +
"a:hover{text-decoration:underline;color:#000}" +
"a:link{text-decoration:underline;color:#000}" +
"a:active{text-decoration:underline;color:#000}" +
"tree-view-parent > ul, li{list-style-type:none;}" + // tree view
".tree-view-parent{margin:0;margin-left:15px;margin-top:2px;padding:0;}" +
".tree-view-caret{cursor:pointer;-webkit-user-select:none;/* Safari 3.1+ */-moz-user-select:none;/* Firefox 2+ */-ms-user-select:none;/* IE 10+ */user-select:none;margin-left:-15px;}" +
".tree-view-caret::before{content:\"\\25B6\";color:black;display:inline-block;margin-right:6px;font-size:10px;}" +
".tree-view-caret-down::before{-ms-transform:rotate(90deg);/* IE 9 */-webkit-transform:rotate(90deg);/* Safari */transform:rotate(90deg);}" +
".tree-view-nested{display:none;padding-left:13px;}" +
".tree-view-active{display:block;}" +
"hr.half-linebreak{margin:0;height:7px;border:none;color:transparent;background-color:transparent;}"
).withType("text/css")
), body(
table().withClass("tg").with(
thead(tableHeadElements),
tableBody
),
p(i(rawHtml("Report created by {metæffekt} Artifact Analysis Plugin"))),
script("" +
"function toggleTreeView(element) {" +
" element.parentElement.querySelector('.tree-view-nested').classList.toggle('tree-view-active');" +
" element.classList.toggle('tree-view-caret-down');" +
"}" +
"function initialize() {" + // this function is called when the document has loaded
" let toggler = document.getElementsByClassName('tree-view-caret');" +
" console.log(toggler.length);" +
" console.log(toggler);let i = 0;" +
" for (i = 0; i < toggler.length; i++) {" +
" console.log(i);" +
" console.log(toggler[i]);" +
" toggler[i].addEventListener('click', function() {toggleTreeView(this)});" +
" }" +
"}" +
"document.onload = initialize();").withType("text/javascript")
)
);
try {
FileUtils.write(file, htmlDocument.render(), StandardCharsets.UTF_8);
} catch (IOException e) {
LOG.error("Unable to write Inventory Report to file [" + file.getAbsolutePath() + "]", e);
}
}
private void addMessage(Set<String> highMessages, Set<String> mediumMessages, Status status) {
switch (status.risk) {
case HIGH:
highMessages.add(status.reason);
// no break: a HIGH reason is additionally recorded with the medium messages
case MEDIUM:
mediumMessages.add(status.reason);
}
}
private void addMessage(Set<String> lowMessages, Status status) {
switch (status.risk) {
case HIGH:
case MEDIUM:
lowMessages.add(status.reason + " (in a previous iteration)");
}
}
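// Worked example with hypothetical values for evaluateDerivedLicense below:
// curated licenses (licensesRef) = [Apache License 2.0]
// expected derived licenses (derivedLicensesRef) = [Apache License 2.0, MIT License]
// licenses derived in this run (derivedLicenses) = [Apache License 2.0, MIT License, BSD 3-Clause License]
// -> overhead/underhead against the curated licenses are not both empty, so no early return;
// -> overheadRef = [BSD 3-Clause License] is not empty -> Status(HIGH, "Additional license detected.").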
private Set<Status> evaluateDerivedLicense(Artifact artifact, String key) {
Set<Status> statuses = new HashSet<>();
final String derivedLicenseRef = valueOf(artifact.get(Constants.KEY_DERIVED_LICENSES));
final String licenseRef = valueOf(artifact.getLicense());
final String derivedLicense = valueOf(artifact.get(key));
String identifiedTerms = artifact.get(key + "-terms");
if (identifiedTerms != null && Arrays.asList(identifiedTerms.split(", ?")).contains("Incomplete Match")) {
statuses.add(new Status(Risk.MEDIUM, "Incomplete Match."));
}
// split the license string into individual licenses
List<String> derivedLicensesRef = InventoryUtils.tokenizeLicense(derivedLicenseRef, false, true);
List<String> licensesRef = InventoryUtils.tokenizeLicense(licenseRef, false, true);
List<String> derivedLicenses = InventoryUtils.tokenizeLicense(derivedLicense, false, true);
// check whether expectation is that none should be matched
derivedLicenses.remove("");
if (derivedLicenses.size() == 0 && licensesRef.size() > 0 && licensesRef.get(0).equals("")) {
statuses.add(evaluateLicenseRef(artifact));
return statuses;
} else if (derivedLicenses.size() == 0 && derivedLicensesRef.size() > 0 && derivedLicensesRef.get(0).equals("")) {
statuses.add(evaluateDerivedLicenseRef(artifact));
return statuses;
}
// find the difference between the derived and reference licenses
Set<String> overhead = new HashSet<>(derivedLicenses);
licensesRef.forEach(overhead::remove);
Set<String> underhead = new HashSet<>(licensesRef);
derivedLicenses.forEach(underhead::remove);
if (overhead.isEmpty() && underhead.isEmpty()) {
statuses.add(evaluateLicenseRef(artifact));
return statuses;
}
Set<String> overheadRef = new HashSet<>(derivedLicenses);
derivedLicensesRef.forEach(overheadRef::remove);
Set<String> underheadRef = new HashSet<>(derivedLicensesRef);
derivedLicenses.forEach(underheadRef::remove);
if (!overheadRef.isEmpty()) {
statuses.add(new Status(Risk.HIGH, "Additional license detected."));
return statuses;
}
if (underheadRef.isEmpty()) {
statuses.add(evaluateDerivedLicenseRef(artifact));
return statuses;
}
statuses.add(new Status(Risk.MEDIUM, "Fewer licenses detected."));
return statuses;
}
private class Status {
Risk risk;
String reason;
public Status(Risk risk, String reason) {
this.risk = risk;
this.reason = reason;
}
}
private Status evaluateDerivedLicenseRef(Artifact artifact) {
if (artifact.getLicense() == null) {
return new Status(Risk.HIGH, "No expectation defined.");
}
if (!artifact.getLicense().equalsIgnoreCase(artifact.get(Constants.KEY_DERIVED_LICENSES)))
return new Status(Risk.MEDIUM, "Inconsistency between curated license and expected derived licenses detected.");
return evaluateLicenseRef(artifact);
}
private Status evaluateLicenseRef(Artifact artifact) {
if (artifact.getLicense() == null) {
return new Status(Risk.HIGH, "No expectation defined.");
}
return new Status(Risk.LOW, "Curated license provided.");
}
private String dateOf(String key) {
long l = Long.parseLong(key);
return new Date(l).toString();
}
public String valueOf(Object value) {
if (value == null) return "";
return String.valueOf(value);
}
public boolean isReportFailure() {
return reportFailure;
}
/**
* Converts a license name into the corresponding metaeffekt universe overview URL. For example:
* License: Creative Commons BY 4.0
* Url: https://github.com/org-metaeffekt/metaeffekt-universe/blob/main/src/main/resources/ae-universe/[c]/README.md#:~:text=Creative%20Commons%20BY%204.0
*
* @param license Any license name that is contained in the metaeffekt universe.
* @return The metaeffekt universe overview url to the license or null, if the given license is in an invalid format / not a license.
*/
public String getMetaeffektUniverseUrl(String license) {
if (license == null || license.length() == 0 || license.equals("<none>")) return null;
String namespace = (license.charAt(0) + "").toLowerCase();
String anchor = license.replace(" ", "%20").replace("-", "%2D");
String scrollToTextFragment = ":~:text=" + anchor;
return "https://github.com/org-metaeffekt/metaeffekt-universe/blob/main/src/main/resources/ae-universe/[" +
namespace + "]/README.md#" + scrollToTextFragment;
}
private UlTag buildDocumentTree(List<PathNode> nodes, UlTag parent) {
boolean isTopmostLayer = parent == null;
if (parent == null) parent = ul().withClass("tree-view-parent");
if (nodes == null || nodes.size() == 0) return parent;
for (PathNode node : nodes.stream().sorted(Comparator.comparing(PathNode::hasChildNodes)).collect(Collectors.toList())) {
LiTag entry = li();
if (node.hasChildNodes()) {
if (isTopmostLayer) {
entry.with(
span(node.getIdentifier()).withClasses("tree-view-caret"),
buildDocumentTree(node.getChildNodes(), ul().withClasses("tree-view-nested"))
);
} else {
entry.with(
span(node.getIdentifier()).withClasses("tree-view-caret", "tree-view-caret-down"),
buildDocumentTree(node.getChildNodes(), ul().withClasses("tree-view-nested", "tree-view-active"))
);
}
} else {
entry.with(text(node.getIdentifier()));
}
parent.with(entry);
}
return parent;
}
private static class PathNode {
private final List<PathNode> childNodes = new ArrayList<>();
private final String identifier;
private final boolean leadingSlash;
public PathNode(String path) {
if (path != null && path.length() > 0) {
List<String> split = splitPath(path);
this.identifier = split.get(0);
this.leadingSlash = path.startsWith("/") || path.startsWith("\\");
addPath(split);
} else {
identifier = null;
leadingSlash = false;
}
}
public PathNode(List<String> path) {
if (path != null && path.size() > 0) {
this.identifier = path.get(0);
this.leadingSlash = false;
addPath(path);
} else {
identifier = null;
leadingSlash = false;
}
}
public List<PathNode> getChildNodes() {
return childNodes;
}
public String getIdentifier() {
return identifier;
}
public boolean hasChildNodes() {
return childNodes.size() > 0;
}
public void addPath(String path) {
addPath(splitPath(path));
}
private void addPath(List<String> split) {
if (split.size() <= 1) return;
// note: the given list is mutated; its first element (this node's identifier) is removed
split.remove(0);
for (PathNode childNode : childNodes) {
if (childNode.isSubPath(split)) {
childNode.addPath(split);
return;
}
}
childNodes.add(new PathNode(split));
}
public boolean isSubPath(String path) {
return isSubPath(splitPath(path));
}
public boolean isSubPath(List<String> path) {
return path.size() > 0 && path.get(0).equals(identifier);
}
private List<String> splitPath(String path) {
return Arrays.stream(path.split("[/\\\\]+")).filter(l -> l.length() > 0).collect(Collectors.toList());
}
public List<String> getAllPaths() {
List<String> paths = new ArrayList<>();
if (childNodes.size() > 0) {
for (PathNode childNode : childNodes) {
childNode.getAllPaths((leadingSlash ? "/" : "") + identifier, paths);
}
} else {
paths.add((leadingSlash ? "/" : "") + identifier);
}
return paths;
}
private void getAllPaths(String current, List<String> paths) {
current = current + "/" + identifier;
if (childNodes.size() > 0) {
for (PathNode childNode : childNodes) {
childNode.getAllPaths(current, paths);
}
} else {
paths.add(current);
}
}
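/**
* Builds a forest of path nodes from a collection of slash-separated paths.
* Example with illustrative paths: ["docs/licenses/apache", "docs/licenses/mit", "src/main"] yields the two root
* nodes "docs" (with child "licenses" containing "apache" and "mit") and "src" (with child "main").
*
* @param paths The paths to arrange into a tree structure.
* @return The root nodes of the resulting tree.
*/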
public static List<PathNode> makeNodes(Collection<String> paths) {
List<PathNode> nodes = new ArrayList<>();
for (String path : paths) {
if (nodes.size() == 0) {
nodes.add(new PathNode(path));
} else {
boolean foundMatching = false;
for (PathNode node : nodes) {
if (node.isSubPath(path)) {
node.addPath(path);
foundMatching = true;
break;
}
}
if (!foundMatching) {
nodes.add(new PathNode(path));
}
}
}
return nodes;
}
@Override
public String toString() {
return identifier;
}
}
}