no.hasmac.jsonld.processor.ToRdfProcessor Maven / Gradle / Ivy
Go to download
Show more of this group Show more artifacts with this name
Show all versions of hasmac-json-ld Show documentation
Show all versions of hasmac-json-ld Show documentation
A more performant JSON-LD 1.1 Processor & API forked from Titanium JSON-LD.
The newest version!
/*
* Copyright 2020 APICATALOG and HASMAC.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/
package no.hasmac.jsonld.processor;
import jakarta.json.JsonArray;
import no.hasmac.jsonld.JsonLdError;
import no.hasmac.jsonld.JsonLdErrorCode;
import no.hasmac.jsonld.JsonLdOptions;
import no.hasmac.jsonld.deseralization.JsonLdToRdf;
import no.hasmac.jsonld.document.Document;
import no.hasmac.jsonld.flattening.NodeMap;
import no.hasmac.jsonld.flattening.NodeMapBuilder;
import no.hasmac.jsonld.loader.DocumentLoaderOptions;
import no.hasmac.rdf.Rdf;
import no.hasmac.rdf.RdfConsumer;
import no.hasmac.rdf.RdfDataset;
import no.hasmac.rdf.RdfValueFactory;
import java.net.URI;
/**
 * Implements the JSON-LD to RDF transformation entry points.
 *
 * @see JsonLdProcessor#toRdf
 */
public final class ToRdfProcessor {

    private ToRdfProcessor() {
        // Utility class — static methods only, no instances.
    }

    /**
     * Fetches the document identified by {@code input} and transforms it into a
     * freshly created {@link RdfDataset}.
     *
     * @param input   location of the JSON-LD document to transform
     * @param options processing options; must carry a non-null document loader
     * @return a new dataset populated with the deserialized triples/quads
     * @throws JsonLdError if the document cannot be loaded or transformed
     */
    public static RdfDataset toRdf(final URI input, final JsonLdOptions options) throws JsonLdError {
        RdfDataset dataset = Rdf.createDataset();
        toRdf(input, options, dataset, Rdf.createValueFactory());
        return dataset;
    }

    /**
     * Transforms an already-loaded JSON-LD document into a freshly created
     * {@link RdfDataset}.
     *
     * @param input   the JSON-LD document to transform
     * @param options processing options
     * @return a new dataset populated with the deserialized triples/quads
     * @throws JsonLdError if the document cannot be expanded or transformed
     */
    public static RdfDataset toRdf(Document input, final JsonLdOptions options) throws JsonLdError {
        RdfDataset dataset = Rdf.createDataset();
        toRdf(input, options, dataset, Rdf.createValueFactory());
        return dataset;
    }

    /**
     * Fetches the document identified by {@code input} and streams the resulting
     * RDF statements into the supplied consumer.
     *
     * @param input           location of the JSON-LD document to transform
     * @param options         processing options; must carry a non-null document loader
     * @param rdfConsumer     sink that receives the produced RDF statements
     * @param rdfValueFactory factory used to create RDF terms for the consumer
     * @throws JsonLdError if no document loader is configured, loading fails,
     *                     or the transformation fails
     */
    public static void toRdf(final URI input, final JsonLdOptions options, RdfConsumer rdfConsumer, RdfValueFactory rdfValueFactory) throws JsonLdError {

        if (options.getDocumentLoader() == null) {
            throw new JsonLdError(JsonLdErrorCode.LOADING_DOCUMENT_FAILED, "Document loader is null. Cannot fetch [" + input + "].");
        }

        final DocumentLoaderOptions loaderOptions = new DocumentLoaderOptions();
        loaderOptions.setExtractAllScripts(options.isExtractAllScripts());

        final Document remoteDocument = options.getDocumentLoader().loadDocument(input, loaderOptions);

        if (remoteDocument == null) {
            // Include the URI so the failure is diagnosable, consistent with the
            // null-loader check above (the original threw without any message).
            throw new JsonLdError(JsonLdErrorCode.LOADING_DOCUMENT_FAILED, "Document loader returned null. Cannot fetch [" + input + "].");
        }

        toRdf(remoteDocument, options, rdfConsumer, rdfValueFactory);
    }

    /**
     * Transforms an already-loaded JSON-LD document by expanding it and then
     * deserializing the expanded form into RDF, streaming the statements into
     * the supplied consumer.
     *
     * @param input           the JSON-LD document to transform
     * @param options         processing options (processing mode, base, expand
     *                        context, RDF direction, URI validation)
     * @param rdfConsumer     sink that receives the produced RDF statements
     * @param rdfValueFactory factory used to create RDF terms for the consumer
     * @throws JsonLdError if the document cannot be expanded or transformed
     */
    public static void toRdf(Document input, final JsonLdOptions options, RdfConsumer rdfConsumer, RdfValueFactory rdfValueFactory) throws JsonLdError {

        final JsonLdOptions expansionOptions = new JsonLdOptions(options);

        // Re-set explicitly even though the copy constructor presumably copies
        // these — NOTE(review): kept for safety; confirm against JsonLdOptions
        // before removing.
        expansionOptions.setProcessingMode(options.getProcessingMode());
        expansionOptions.setBase(options.getBase());
        expansionOptions.setExpandContext(options.getExpandContext());

        final JsonArray expandedInput = ExpansionProcessor.expand(input, expansionOptions, false);

        JsonLdToRdf
                .with(
                        // Flatten the expanded document into a node map before deserialization.
                        NodeMapBuilder.with(expandedInput, new NodeMap()).build(),
                        rdfConsumer, rdfValueFactory, expansionOptions
                )
                .rdfDirection(options.getRdfDirection())
                .uriValidation(options.isUriValidation())
                .build();
    }
}