// com.metaeffekt.artifact.analysis.flow.ng.crypt.licensetexts.TextCollectionProducer Maven / Gradle / Ivy
/*
* Copyright 2021-2024 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.metaeffekt.artifact.analysis.flow.ng.crypt.licensetexts;
import com.metaeffekt.artifact.analysis.flow.ng.ContentAlgorithmParam;
import com.metaeffekt.artifact.analysis.flow.ng.KeypairGenerator;
import com.metaeffekt.artifact.analysis.flow.ng.TermMetaDataProducer;
import com.metaeffekt.artifact.analysis.flow.ng.crypt.param.GenericTmdProducerSpec;
import com.metaeffekt.artifact.analysis.flow.ng.crypt.param.NormMetaSupplierParameters;
import com.metaeffekt.artifact.analysis.flow.ng.crypt.param.ProviderParameters;
import com.metaeffekt.artifact.analysis.flow.ng.keyholder.UserKeysForConsumer;
import com.metaeffekt.artifact.analysis.flow.ng.keyholder.UserKeysForSupplier;
import com.metaeffekt.artifact.terms.model.LicenseTextEntry;
import com.metaeffekt.artifact.terms.model.NormalizationMetaData;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.tuple.Pair;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.nio.file.Files;
import java.security.SecureRandom;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import static com.metaeffekt.artifact.analysis.flow.ng.EncryptedArtifactProducerUtils.*;
/**
* Helper for writing a collection of license texts using {@link com.metaeffekt.artifact.analysis.flow.ng.crypt.licensetexts.EncryptedTextCollectionSupplier}.
*/
public class TextCollectionProducer {

    // Bound to this class (the original was bound to TermMetaDataProducer.class,
    // a copy-paste slip that misattributed all log output).
    private static final Logger LOG = LoggerFactory.getLogger(TextCollectionProducer.class);

    /** Static utility class; not meant to be instantiated. */
    private TextCollectionProducer() {
    }

    /**
     * Produces an encrypted text collection using the given params (and referenced data
     * in the file system). When {@code params.isEnableImplicitTest()} is set, a second
     * encryption run is performed with an additional throwaway key pair and the output
     * is decrypted again as a round-trip self-test.
     *
     * @param params containing parameters used in the process.
     * @throws Exception unlikely the exceptions thrown here could be remedied in code, honestly. Rethrowing for logs.
     */
    public static void produce(GenericTmdProducerSpec params) throws Exception {
        // temp dir for temporary keys. unused if test is disabled
        final File tempDir = Files.createTempDirectory("textCollectGenerate-tempKeys-").toFile();
        FileUtils.forceDeleteOnExit(tempDir);

        final File dummyDecryptionKeysDir = new File(tempDir, "decryptionTestKeys");
        final File allowedKeysWithDummy = new File(tempDir, "allowedKeys");
        final File dummyZipOutputFile = new File(tempDir, "text-collection");

        final SecureRandom random = new SecureRandom();

        File dummyKeyForUser = null;
        String temporaryPassword = null;
        if (params.isEnableImplicitTest()) {
            // initialize a dummy access key for testing
            final Pair<UserKeysForSupplier, UserKeysForConsumer> keypair =
                    KeypairGenerator.createUserKeyPair(random);
            final UserKeysForSupplier keysForSupplier = keypair.getLeft();
            final UserKeysForConsumer keysForConsumer = keypair.getRight();

            final File dummyKeyForPublisher = new File(allowedKeysWithDummy, "dummyKeyForPublisher.tmp.json");
            dummyKeyForUser = new File(dummyDecryptionKeysDir, "dummyKeyForUser.tmp.json");
            temporaryPassword = KeypairGenerator.generateUserKeysPassword(64);

            // write the generated dummy key
            writeDummyKeys(dummyKeyForPublisher, dummyKeyForUser, keysForSupplier, keysForConsumer, temporaryPassword);
        }

        // read normalizationMetaData from file system; needed for generation
        final NormalizationMetaData normalizationMetaData = new NormalizationMetaData(params.getTermsMetaDataBaseDir());

        // filter customer related information
        filterCustomerMetaData(normalizationMetaData);

        // prepare for encryption
        final NormMetaSupplierParameters supplierParameters = new NormMetaSupplierParameters(
                new ContentAlgorithmParam(),
                normalizationMetaData,
                params.getAllowedUserKeysDir(),
                params.getLicenseTextFile(),
                params.getZipOutputFile()
        );

        LOG.info("Beginning output of encrypted content.");

        // encrypt to blob and write keyslots file
        final EncryptedTextCollectionSupplier supplier = new EncryptedTextCollectionSupplier();
        supplier.process(supplierParameters);

        if (params.isEnableImplicitTest()) {
            LOG.info("Beginning test with dummy key.");

            // separate encryption run with the same data but an additional dummy key
            final NormMetaSupplierParameters dummySupplierParam = new NormMetaSupplierParameters(
                    new ContentAlgorithmParam(),
                    normalizationMetaData,
                    allowedKeysWithDummy,
                    params.getLicenseTextFile(),
                    dummyZipOutputFile
            );

            // encrypt to blob
            final EncryptedTextCollectionSupplier dummySupplier = new EncryptedTextCollectionSupplier();
            dummySupplier.process(dummySupplierParam);

            // decryption crash test
            final ProviderParameters providerParameters = new ProviderParameters(
                    new ContentAlgorithmParam(),
                    temporaryPassword,
                    dummyKeyForUser,
                    dummyZipOutputFile);

            final List<String> canonicalNames = Collections.singletonList("Apache License 2.0");

            // decrypt blob for testing
            // NOTE(review): result assumed to map canonical name -> LicenseTextEntry
            // (LicenseTextEntry is imported and entries expose getCanonicalName()) — confirm
            final EncryptedTextCollectionReader provider = new EncryptedTextCollectionReader();
            final Map<String, LicenseTextEntry> queryResult = provider.findLicenses(
                    providerParameters,
                    Collections.singleton("Apache License 2.0")
            );

            // check result
            assertEquals(queryResult.get("Apache License 2.0").getCanonicalName(), "Apache License 2.0");

            // make sure we got the correct data
            assertEquals(1, queryResult.size());
            assertEquals(true, queryResult.keySet().containsAll(canonicalNames));
            for (String name : canonicalNames) {
                if (queryResult.get(name) == null) {
                    throw new RuntimeException("Self-test failed: text collection didn't contain " + name + ".");
                }
                assertEquals(name, queryResult.get(name).getCanonicalName());
            }

            // dump query result; parameterized logging defers string building
            LOG.info("Collection query result: {}", queryResult);
        } else {
            LOG.warn("Implicit output testing is disabled!");
        }
    }
}
// © 2015 - 2025 Weber Informatics LLC | Privacy Policy