
/*
* Copyright © 2013-2022 Metreeca srl
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.metreeca.rdf4j.services;
import com.metreeca.json.Frame;
import com.metreeca.rest.*;
import com.metreeca.rest.services.Logger;
import org.eclipse.rdf4j.model.*;
import org.eclipse.rdf4j.model.vocabulary.RDF;
import org.eclipse.rdf4j.query.*;
import org.eclipse.rdf4j.repository.Repository;
import org.eclipse.rdf4j.repository.RepositoryConnection;
import org.eclipse.rdf4j.rio.helpers.StatementCollector;
import java.time.Instant;
import java.util.*;
import java.util.function.*;
import static com.metreeca.json.Frame.frame;
import static com.metreeca.json.Values.iri;
import static com.metreeca.json.Values.literal;
import static com.metreeca.rest.Toolbox.service;
import static com.metreeca.rest.Xtream.task;
import static com.metreeca.rest.services.Logger.logger;
import static com.metreeca.rest.services.Logger.time;
import static org.eclipse.rdf4j.query.QueryLanguage.SPARQL;
import static java.lang.String.format;
import static java.time.ZoneOffset.UTC;
import static java.time.temporal.ChronoUnit.MILLIS;
import static java.util.stream.Collectors.toCollection;
/**
* Graph store.
*
* Manages task execution on an RDF {@linkplain Repository repository}.
*
* Nested task executions on the same thread will share the same connection to the backing RDF repository through a
* {@link ThreadLocal} scope variable.
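*
* A hedged usage sketch (assuming a graph service has been configured in the shared {@code Toolbox}; not part of
* this API):
*
* <pre>{@code
* final Graph graph=Toolbox.service(Graph.graph());
*
* graph.update(outer -> {           // opens a thread-bound connection and a transaction
*
*     return graph.query(inner ->   // the nested call on the same thread reuses the same connection
*             inner.size()
*     );
*
* });
* }</pre>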
*/
public final class Graph implements AutoCloseable {
private static final ThreadLocal<RepositoryConnection> context=new ThreadLocal<>();
/**
* Retrieves the default graph factory.
*
* @return the default graph factory, which throws an exception reporting the service as undefined
*/
public static Supplier<Graph> graph() {
return () -> { throw new IllegalStateException("undefined graph service"); };
}
//// Transactions //////////////////////////////////////////////////////////////////////////////////////////////////
/**
* Creates a transaction wrapper.
*
* @return a wrapper ensuring that requests are handled within a single graph transaction
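*
* A hedged wiring sketch ({@code Wrapper.wrap(Handler)} and the handler lambda shape are assumptions, mirrored
* from the implementation below):
*
* <pre>{@code
* final Handler transactional=Graph.txn().wrap(request -> consumer -> {
*     // handle the request; the whole exchange runs inside a single graph transaction
* });
* }</pre>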
*/
public static Wrapper txn() {
final Graph graph=service(graph());
return handler -> request -> consumer -> graph.update(task(connection ->
handler.handle(request).accept(consumer)
));
}
//// SPARQL Processors /////////////////////////////////////////////////////////////////////////////////////////////
/**
* Creates a SPARQL query message filter.
*
* @param query the SPARQL graph query (describe/construct) to be executed by the new filter on target
* messages; empty scripts are ignored
* @param customizers optional custom configuration setters for the SPARQL query operation
* @param <M> the type of the target message for the new filter
*
* @return a message filter executing the SPARQL graph {@code query} on target messages with {@linkplain
* #configure(Message, Operation, BiConsumer[]) standard bindings} and optional custom configurations; returns the
* input model extended with the statements returned by {@code query}
*
* @throws NullPointerException if any argument is null or if {@code customizers} contains null values
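*
* A hedged configuration sketch (the SPARQL text and the execution-time limit are illustrative only; {@code ?this}
* is pre-bound by {@linkplain #configure(Message, Operation, BiConsumer[]) configure}):
*
* <pre>{@code
* final BiFunction<Response, Frame, Frame> labels=Graph.query(
*
*         "prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> "
*                 +"construct { ?this rdfs:label ?label } where { ?this rdfs:label ?label }",
*
*         (response, query) -> query.setMaxExecutionTime(10) // optional per-operation customization
*
* );
* }</pre>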
*/
@SafeVarargs public static <M extends Message<M>> BiFunction<M, Frame, Frame> query(
final String query, final BiConsumer<M, GraphQuery>... customizers
) {
if ( query == null ) {
throw new NullPointerException("null query");
}
if ( customizers == null || Arrays.stream(customizers).anyMatch(Objects::isNull) ) {
throw new NullPointerException("null customizers");
}
final Graph graph=service(graph());
final Logger logger=service(logger());
return query.isEmpty() ? (message, frame) -> frame : (message, frame) -> graph.query(connection -> {
logger.debug(Graph.class, () -> format("evaluating query %s", query));
final ArrayList<Statement> model=frame.model().collect(toCollection(ArrayList::new));
time(() -> configure(
message, connection.prepareGraphQuery(SPARQL, query, message.request().base()), customizers
).evaluate(
new StatementCollector(model)
)).apply(elapsed ->
logger.debug(Graph.class, () -> format("evaluated in <%,d> ms", elapsed))
);
return frame(frame.focus(), model);
});
}
/**
* Creates a SPARQL update housekeeping task.
*
* @param update the SPARQL update script to be executed by the new housekeeping filter on target messages;
* empty scripts are ignored
* @param customizers optional custom configuration setters for the SPARQL update operation
* @param <M> the type of the target message for the new filter
*
* @return a housekeeping task executing the SPARQL {@code update} script on target messages with {@linkplain
* #configure(Message, Operation, BiConsumer[]) standard bindings} and optional custom configurations; returns the
* input message without altering it
*
* @throws NullPointerException if any argument is null or if {@code customizers} contains null values
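*
* A hedged housekeeping sketch (the SPARQL text is illustrative only; {@code ?this} and {@code ?time} are
* pre-bound by {@linkplain #configure(Message, Operation, BiConsumer[]) configure}):
*
* <pre>{@code
* final Function<Response, Response> touch=Graph.update(
*
*         "prefix dct: <http://purl.org/dc/terms/> "
*                 +"delete { ?this dct:modified ?modified } "
*                 +"insert { ?this dct:modified ?time } "
*                 +"where { optional { ?this dct:modified ?modified } }"
*
* );
* }</pre>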
*/
@SafeVarargs public static <M extends Message<M>> Function<M, M> update(
final String update, final BiConsumer<M, Update>... customizers
) {
if ( update == null ) {
throw new NullPointerException("null update");
}
if ( customizers == null || Arrays.stream(customizers).anyMatch(Objects::isNull) ) {
throw new NullPointerException("null customizers");
}
final Graph graph=service(graph());
final Logger logger=service(logger());
return update.isEmpty() ? message -> message : message -> graph.update(connection -> {
logger.debug(Graph.class, () -> format("evaluating update %s", update));
time(() -> configure(
message, connection.prepareUpdate(SPARQL, update, message.request().base()), customizers
).execute()).apply(elapsed ->
logger.debug(Graph.class, () -> format("evaluated in <%,d> ms", elapsed))
);
return message;
});
}
/**
* Configures standard bindings for SPARQL operations.
*
* Configures the following pre-defined bindings for the target SPARQL operation:
*
* <table>
*     <caption>pre-defined bindings</caption>
*     <thead>
*         <tr>
*             <th>variable</th>
*             <th>value</th>
*         </tr>
*     </thead>
*     <tbody>
*         <tr>
*             <td>{@code ?time}</td>
*             <td>an {@code xsd:dateTime} literal representing the execution system time with millisecond
*             precision</td>
*         </tr>
*         <tr>
*             <td>{@code ?this}</td>
*             <td>the {@linkplain Message#item() focus item} of the filtered message</td>
*         </tr>
*         <tr>
*             <td>{@code ?stem}</td>
*             <td>the {@linkplain IRI#getNamespace() namespace} of the IRI bound to the {@code this} variable</td>
*         </tr>
*         <tr>
*             <td>{@code ?name}</td>
*             <td>the local {@linkplain IRI#getLocalName() name} of the IRI bound to the {@code this} variable</td>
*         </tr>
*         <tr>
*             <td>{@code ?task}</td>
*             <td>the HTTP {@linkplain Request#method() method} of the original request</td>
*         </tr>
*         <tr>
*             <td>{@code ?base}</td>
*             <td>the {@linkplain Request#base() base} IRI of the original request</td>
*         </tr>
*         <tr>
*             <td>{@code ?item}</td>
*             <td>the {@linkplain Message#item() focus item} of the original request</td>
*         </tr>
*         <tr>
*             <td>{@code ?user}</td>
*             <td>the IRI identifying the {@linkplain Request#user() user} submitting the original request or
*             {@linkplain RDF#NIL} if no user is authenticated</td>
*         </tr>
*     </tbody>
* </table>
*
* If the target message is a {@linkplain Response response}, the following additional bindings are configured:
*
* <table>
*     <caption>response bindings</caption>
*     <thead>
*         <tr>
*             <th>variable</th>
*             <th>value</th>
*         </tr>
*     </thead>
*     <tbody>
*         <tr>
*             <td>{@code ?code}</td>
*             <td>the HTTP {@linkplain Response#status() status code} of the filtered response</td>
*         </tr>
*     </tbody>
* </table>
*
* @param message the message to be filtered
* @param operation the SPARQL operation executed by the filter
* @param customizers optional custom configuration setters for the SPARQL {@code operation}
* @param <M> the type of the {@code message} to be filtered
* @param <O> the type of the SPARQL {@code operation} to be configured
*
* @return the input {@code operation} with standard bindings and optional custom configurations applied
*
* @throws NullPointerException if any argument is null or if {@code customizers} contains null values
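*
* A hedged sketch of a custom configuration setter layered on top of the standard bindings (the {@code status}
* binding name is hypothetical):
*
* <pre>{@code
* final BiConsumer<Response, Update> customizer=(response, update) ->
*         update.setBinding("status", literal(response.status())); // hypothetical extra binding
* }</pre>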
*/
@SafeVarargs public static <M extends Message<M>, O extends Operation> O configure(
final M message, final O operation, final BiConsumer<M, O>... customizers
) {
if ( message == null ) {
throw new NullPointerException("null message");
}
if ( operation == null ) {
throw new NullPointerException("null operation");
}
if ( customizers == null || Arrays.stream(customizers).anyMatch(Objects::isNull) ) {
throw new NullPointerException("null customizers");
}
operation.setBinding("time", literal(Instant.now().truncatedTo(MILLIS).atZone(UTC)));
final IRI item=iri(message.item());
operation.setBinding("this", item);
operation.setBinding("stem", iri(item.getNamespace()));
operation.setBinding("name", literal(item.getLocalName()));
final Request request=message.request();
operation.setBinding("task", literal(request.method()));
operation.setBinding("base", iri(request.base()));
operation.setBinding("item", iri(request.item()));
operation.setBinding("user", request.user()
.map(v -> v instanceof Value ? (Value)v : literal(v.toString()))
.orElse(RDF.NIL)
);
if ( message instanceof Response ) {
operation.setBinding("code", literal(((Response)message).status()));
}
for (final BiConsumer<M, O> customizer : customizers) {
customizer.accept(message, operation);
}
return operation;
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
private Repository repository;
/**
* Creates a graph store.
*
* @param repository the backing RDF repository
*
* @throws NullPointerException if {@code repository} is null
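*
* A minimal construction sketch (assuming an in-memory RDF4J {@code SailRepository} over a {@code MemoryStore}):
*
* <pre>{@code
* final Graph graph=new Graph(new SailRepository(new MemoryStore()));
* }</pre>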
*/
public Graph(final Repository repository) {
if ( repository == null ) {
throw new NullPointerException("null repository");
}
this.repository=repository;
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
@Override public void close() {
try {
if ( repository != null && repository.isInitialized() ) { repository.shutDown(); }
} finally {
repository=null;
}
}
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
/**
* Executes a query on this graph store.
*
* @param query the query to be executed; takes as argument a connection to the repository backing this graph store
* @param <V> the type of the value returned by {@code query}
*
* @return the value returned by {@code query}
*
* @throws NullPointerException if {@code query} is null
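*
* A minimal read sketch (given a {@code graph} instance; {@code size()} is the standard RDF4J connection call):
*
* <pre>{@code
* final long statements=graph.query(connection -> connection.size());
* }</pre>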
*/
public <V> V query(final Function<RepositoryConnection, V> query) {
if ( query == null ) {
throw new NullPointerException("null query");
}
if ( repository == null ) {
throw new IllegalStateException("closed graph store");
}
final RepositoryConnection shared=context.get();
if ( shared != null ) { return query.apply(shared); } else {
if ( !repository.isInitialized() ) { repository.init(); }
try ( final RepositoryConnection connection=repository.getConnection() ) {
context.set(connection);
return query.apply(connection);
} finally {
context.remove();
}
}
}
/**
* Executes an update inside a transaction on this graph store.
*
* <ul>
*
*     <li>if a transaction is not already active on the underlying storage, begins one and commits it on
*     successful update completion;</li>
*
*     <li>if the update throws an exception, rolls back the transaction and rethrows the exception;</li>
*
*     <li>in either case, no action is taken if the transaction was already terminated inside the update.</li>
*
* </ul>
*
* @param update the update to be executed
* @param <V> the type of the value returned by {@code update}
*
* @return the value returned by {@code update}
*
* @throws NullPointerException if {@code update} is null
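*
* A minimal write sketch (given a {@code graph} instance; the statement values are hypothetical and
* {@code add(...)} is the standard RDF4J connection call):
*
* <pre>{@code
* graph.update(connection -> {
*
*     connection.add(iri("https://example.org/s"), iri("https://example.org/p"), literal("o"));
*
*     return null; // committed on successful completion
* });
* }</pre>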
*/
public <V> V update(final Function<RepositoryConnection, V> update) {
if ( update == null ) {
throw new NullPointerException("null update");
}
return query(connection -> {
if ( connection.isActive() ) { return update.apply(connection); } else {
try {
connection.begin();
final V value=update.apply(connection);
if ( connection.isActive() ) { connection.commit(); }
return value;
} finally {
if ( connection.isActive() ) { connection.rollback(); }
}
}
});
}
}