/*
* Copyright 2015 MarkLogic Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* A library that enables access to a MarkLogic-backed triple-store via the
* Sesame API.
*/
package com.marklogic.semantics.sesame.client;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.marklogic.client.DatabaseClient;
import com.marklogic.client.DatabaseClientFactory;
import com.marklogic.client.FailedRequestException;
import com.marklogic.client.Transaction;
import com.marklogic.client.impl.SPARQLBindingsImpl;
import com.marklogic.client.io.FileHandle;
import com.marklogic.client.io.InputStreamHandle;
import com.marklogic.client.query.QueryDefinition;
import com.marklogic.client.semantics.*;
import com.marklogic.semantics.sesame.MarkLogicSesameException;
import org.openrdf.model.Literal;
import org.openrdf.model.Resource;
import org.openrdf.model.URI;
import org.openrdf.model.Value;
import org.openrdf.query.Binding;
import org.openrdf.repository.sparql.query.SPARQLQueryBindingSet;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.RDFParseException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.InputStream;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
/**
 * Internal class for interacting with the MarkLogic Java Client API.
 *
 * @author James Fuller
 */
class MarkLogicClientImpl {
protected final Logger logger = LoggerFactory.getLogger(MarkLogicClientImpl.class);
private static final String DEFAULT_GRAPH_URI = "http://marklogic.com/semantics#default-graph";
private SPARQLRuleset[] ruleset;
private QueryDefinition constrainingQueryDef;
private GraphPermissions graphPerms;
private SPARQLQueryManager sparqlManager;
private GraphManager graphManager;
private DatabaseClient databaseClient;
/**
 * Constructor initialized with connection details.
 *
 * @param host     MarkLogic host
 * @param port     MarkLogic REST server port
 * @param user     username
 * @param password password
 * @param auth     authentication type passed to DatabaseClientFactory.Authentication.valueOf() (eg. "DIGEST")
 */
public MarkLogicClientImpl(String host, int port, String user, String password, String auth) {
setDatabaseClient(DatabaseClientFactory.newClient(host, port, user, password, DatabaseClientFactory.Authentication.valueOf(auth)));
}
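// Illustrative usage (not part of the original source; host, port and credentials below are placeholders):
// MarkLogicClientImpl impl = new MarkLogicClientImpl("localhost", 8000, "admin", "admin", "DIGEST");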
/**
 * Constructor initialized with an existing DatabaseClient.
 *
 * @param databaseClient
 */
public MarkLogicClientImpl(DatabaseClient databaseClient) {
setDatabaseClient(databaseClient);
}
/**
 * Sets the DatabaseClient and instantiates the related SPARQL query and graph managers.
 *
 * @param databaseClient
 */
private void setDatabaseClient(DatabaseClient databaseClient) {
this.databaseClient = databaseClient;
this.sparqlManager = getDatabaseClient().newSPARQLQueryManager();
this.graphManager = getDatabaseClient().newGraphManager();
}
/**
* gets database client
*
* @return DatabaseClient
*/
public DatabaseClient getDatabaseClient() {
return this.databaseClient;
}
/**
* executes SPARQLQuery
*
* @param queryString
* @param bindings
* @param start
* @param pageLength
* @param tx
* @param includeInferred
* @param baseURI
* @return InputStream containing the query results
* @throws JsonProcessingException
*/
public InputStream performSPARQLQuery(String queryString, SPARQLQueryBindingSet bindings, long start, long pageLength, Transaction tx, boolean includeInferred, String baseURI) throws JsonProcessingException {
return performSPARQLQuery(queryString, bindings, new InputStreamHandle(), start, pageLength, tx, includeInferred, baseURI);
}
/**
* executes SPARQLQuery
*
* @param queryString
* @param bindings
* @param handle
* @param start
* @param pageLength
* @param tx
* @param includeInferred
* @param baseURI
* @return InputStream containing the query results
* @throws JsonProcessingException
*/
public InputStream performSPARQLQuery(String queryString, SPARQLQueryBindingSet bindings, InputStreamHandle handle, long start, long pageLength, Transaction tx, boolean includeInferred, String baseURI) throws JsonProcessingException {
SPARQLQueryDefinition qdef = sparqlManager.newQueryDefinition(queryString);
if (notNull(baseURI) && !baseURI.isEmpty()) { qdef.setBaseUri(baseURI); }
if (notNull(ruleset)){qdef.setRulesets(ruleset);}
if (notNull(getConstrainingQueryDefinition())) {qdef.setConstrainingQueryDefinition(getConstrainingQueryDefinition());}
qdef.setIncludeDefaultRulesets(includeInferred);
if(notNull(graphPerms)){ qdef.setUpdatePermissions(graphPerms);}
if(pageLength > 0){
sparqlManager.setPageLength(pageLength);
}else{
//sparqlManager.clearPageLength();
}
sparqlManager.executeSelect(qdef, handle, start, tx);
return handle.get();
}
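// Illustrative usage sketch (not from the original source; the query and paging values are placeholders;
// a null Transaction and null baseURI are accepted):
// InputStream results = impl.performSPARQLQuery(
//         "SELECT ?s ?p ?o WHERE { ?s ?p ?o } LIMIT 10",
//         new SPARQLQueryBindingSet(), 1, 10, null, true, null);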
/**
* executes GraphQuery
* @param queryString
* @param bindings
* @param tx
* @param includeInferred
* @param baseURI
* @return InputStream containing the serialized RDF result graph
* @throws JsonProcessingException
*/
public InputStream performGraphQuery(String queryString, SPARQLQueryBindingSet bindings, Transaction tx, boolean includeInferred, String baseURI) throws JsonProcessingException {
return performGraphQuery(queryString, bindings, new InputStreamHandle(), tx, includeInferred, baseURI);
}
/**
* executes GraphQuery
*
* @param queryString
* @param bindings
* @param handle
* @param tx
* @param includeInferred
* @param baseURI
* @return InputStream containing the serialized RDF result graph
* @throws JsonProcessingException
*/
public InputStream performGraphQuery(String queryString, SPARQLQueryBindingSet bindings, InputStreamHandle handle, Transaction tx, boolean includeInferred, String baseURI) throws JsonProcessingException {
SPARQLQueryDefinition qdef = sparqlManager.newQueryDefinition(queryString);
if (notNull(baseURI) && !baseURI.isEmpty()) { qdef.setBaseUri(baseURI); }
if (notNull(ruleset)) {qdef.setRulesets(ruleset);}
if (notNull(getConstrainingQueryDefinition())){qdef.setConstrainingQueryDefinition(getConstrainingQueryDefinition());}
if(notNull(graphPerms)){ qdef.setUpdatePermissions(graphPerms);}
qdef.setIncludeDefaultRulesets(includeInferred);
sparqlManager.executeDescribe(qdef, handle, tx);
return handle.get();
}
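// Illustrative usage sketch (placeholder IRI, not from the original source):
// InputStream rdf = impl.performGraphQuery(
//         "DESCRIBE <http://example.org/s>", new SPARQLQueryBindingSet(), null, true, null);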
/**
* executes BooleanQuery
*
* @param queryString
* @param bindings
* @param tx
* @param includeInferred
* @param baseURI
* @return boolean result of the ASK query
*/
public boolean performBooleanQuery(String queryString, SPARQLQueryBindingSet bindings, Transaction tx, boolean includeInferred, String baseURI) {
SPARQLQueryDefinition qdef = sparqlManager.newQueryDefinition(queryString);
if (notNull(baseURI) && !baseURI.isEmpty()) { qdef.setBaseUri(baseURI); }
qdef.setIncludeDefaultRulesets(includeInferred);
if (notNull(ruleset)) {qdef.setRulesets(ruleset);}
if (notNull(getConstrainingQueryDefinition())){qdef.setConstrainingQueryDefinition(getConstrainingQueryDefinition());}
if(notNull(graphPerms)){ qdef.setUpdatePermissions(graphPerms);}
return sparqlManager.executeAsk(qdef,tx);
}
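// Illustrative usage sketch (not from the original source):
// boolean exists = impl.performBooleanQuery(
//         "ASK WHERE { ?s ?p ?o }", new SPARQLQueryBindingSet(), null, true, null);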
/**
* executes UpdateQuery
*
* @param queryString
* @param bindings
* @param tx
* @param includeInferred
* @param baseURI
*/
public void performUpdateQuery(String queryString, SPARQLQueryBindingSet bindings, Transaction tx, boolean includeInferred, String baseURI) {
SPARQLQueryDefinition qdef = sparqlManager.newQueryDefinition(queryString);
if (notNull(baseURI) && !baseURI.isEmpty()) { qdef.setBaseUri(baseURI); }
if (notNull(ruleset) ) {qdef.setRulesets(ruleset);}
// constraining query unused when adding triple
//if (notNull(getConstrainingQueryDefinition())){qdef.setConstrainingQueryDefinition(getConstrainingQueryDefinition());}
if(notNull(graphPerms)){ qdef.setUpdatePermissions(graphPerms);}
qdef.setIncludeDefaultRulesets(includeInferred);
sparqlManager.clearPageLength();
sparqlManager.executeUpdate(qdef, tx);
}
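// Illustrative usage sketch (placeholder graph and triple, not from the original source):
// impl.performUpdateQuery(
//         "INSERT DATA { GRAPH <http://example.org/g> { <http://example.org/s> <http://example.org/p> \"o\" } }",
//         new SPARQLQueryBindingSet(), null, false, null);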
/**
* executes merge of triples from File
*
* @param file
* @param baseURI
* @param dataFormat
* @param tx
* @param contexts
* @throws RDFParseException
*/
// performAdd
// as we use mergeGraphs, baseURI is always file.toURI
public void performAdd(File file, String baseURI, RDFFormat dataFormat, Transaction tx, Resource... contexts) throws RDFParseException {
try {
graphManager.setDefaultMimetype(dataFormat.getDefaultMIMEType());
if (dataFormat.equals(RDFFormat.NQUADS) || dataFormat.equals(RDFFormat.TRIG)) {
graphManager.mergeGraphs(new FileHandle(file),tx);
} else {
if (notNull(contexts) && contexts.length>0) {
for (int i = 0; i < contexts.length; i++) {
if(notNull(contexts[i])){
graphManager.mergeAs(contexts[i].toString(), new FileHandle(file), getGraphPerms(),tx);
}else{
graphManager.mergeAs(DEFAULT_GRAPH_URI, new FileHandle(file),getGraphPerms(), tx);
}
}
} else {
graphManager.mergeAs(DEFAULT_GRAPH_URI, new FileHandle(file), getGraphPerms(),tx);
}
}
} catch (FailedRequestException e) {
throw new RDFParseException("Request to MarkLogic server failed, check file and format.");
}
}
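// Illustrative usage sketch (placeholder file and graph IRI, not from the original source;
// URIImpl here is org.openrdf.model.impl.URIImpl):
// impl.performAdd(new File("triples.ttl"), null, RDFFormat.TURTLE, null,
//         new URIImpl("http://example.org/graph1"));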
/**
* executes merge of triples from InputStream
*
* @param in
* @param baseURI
* @param dataFormat
* @param tx
* @param contexts
* @throws RDFParseException
*/
public void performAdd(InputStream in, String baseURI, RDFFormat dataFormat, Transaction tx, Resource... contexts) throws RDFParseException {
try {
graphManager.setDefaultMimetype(dataFormat.getDefaultMIMEType());
if (dataFormat.equals(RDFFormat.NQUADS) || dataFormat.equals(RDFFormat.TRIG)) {
graphManager.mergeGraphs(new InputStreamHandle(in),tx);
} else {
if (notNull(contexts) && contexts.length > 0) {
for (int i = 0; i < contexts.length; i++) {
if (notNull(contexts[i])) {
graphManager.mergeAs(contexts[i].toString(), new InputStreamHandle(in), getGraphPerms(), tx);
} else {
graphManager.mergeAs(DEFAULT_GRAPH_URI, new InputStreamHandle(in),getGraphPerms(), tx);
}
}
} else {
graphManager.mergeAs(DEFAULT_GRAPH_URI, new InputStreamHandle(in),getGraphPerms(), tx);
}
}
} catch (FailedRequestException e) {
throw new RDFParseException("Request to MarkLogic server failed, check input is valid.");
}
}
/**
* executes INSERT of single triple
*
* @param baseURI
* @param subject
* @param predicate
* @param object
* @param tx
* @param contexts
* @throws MarkLogicSesameException
*/
public void performAdd(String baseURI, Resource subject, URI predicate, Value object, Transaction tx, Resource... contexts) throws MarkLogicSesameException {
StringBuilder sb = new StringBuilder();
if(notNull(contexts) && contexts.length>0) {
if (notNull(baseURI)) sb.append("BASE <" + baseURI + ">\n");
sb.append("INSERT DATA { ");
for (int i = 0; i < contexts.length; i++) {
if (notNull(contexts[i])) {
sb.append("GRAPH <" + contexts[i].stringValue() + "> { ?s ?p ?o .} ");
} else {
sb.append("GRAPH <" + DEFAULT_GRAPH_URI + "> { ?s ?p ?o .} ");
}
}
sb.append("}");
} else {
sb.append("INSERT DATA { GRAPH <" + DEFAULT_GRAPH_URI + "> {?s ?p ?o .}}");
}
SPARQLQueryDefinition qdef = sparqlManager.newQueryDefinition(sb.toString());
if (notNull(ruleset) ) {qdef.setRulesets(ruleset);}
// constraining query unused when adding triple
//if (notNull(getConstrainingQueryDefinition())){qdef.setConstrainingQueryDefinition(getConstrainingQueryDefinition());}
if(notNull(graphPerms)){ qdef.setUpdatePermissions(graphPerms);}
if (notNull(baseURI) && !baseURI.isEmpty()) { qdef.setBaseUri(baseURI); }
if(notNull(subject)) qdef.withBinding("s", subject.stringValue());
if(notNull(predicate)) qdef.withBinding("p", predicate.stringValue());
if(notNull(object)) bindObject(qdef, "o", object);
sparqlManager.executeUpdate(qdef, tx);
}
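// Illustrative usage sketch (placeholder values, not from the original source;
// ValueFactoryImpl is org.openrdf.model.impl.ValueFactoryImpl):
// ValueFactory vf = ValueFactoryImpl.getInstance();
// impl.performAdd(null,
//         vf.createURI("http://example.org/s"),
//         vf.createURI("http://example.org/p"),
//         vf.createLiteral("object value"),
//         null);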
/**
* executes DELETE of single triple
*
* @param baseURI
* @param subject
* @param predicate
* @param object
* @param tx
* @param contexts
* @throws MarkLogicSesameException
*/
public void performRemove(String baseURI, Resource subject, URI predicate, Value object, Transaction tx, Resource... contexts) throws MarkLogicSesameException {
StringBuilder sb = new StringBuilder();
if(notNull(contexts) && contexts.length>0) {
if (notNull(baseURI))sb.append("BASE <" + baseURI + ">\n");
sb.append("DELETE WHERE { ");
for (int i = 0; i < contexts.length; i++) {
if (notNull(contexts[i])) {
sb.append("GRAPH <" + contexts[i].stringValue() + "> { ?s ?p ?o .} ");
} else {
sb.append("GRAPH <" + DEFAULT_GRAPH_URI + "> { ?s ?p ?o .} ");
}
}
sb.append("}");
}else{
sb.append("DELETE WHERE { GRAPH ?ctx { ?s ?p ?o .}}");
}
SPARQLQueryDefinition qdef = sparqlManager.newQueryDefinition(sb.toString());
if (notNull(baseURI) && !baseURI.isEmpty()) { qdef.setBaseUri(baseURI); }
if(notNull(subject)) qdef.withBinding("s", subject.stringValue());
if(notNull(predicate)) qdef.withBinding("p", predicate.stringValue());
if(notNull(object)) bindObject(qdef, "o", object);
sparqlManager.executeUpdate(qdef, tx);
}
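// Illustrative usage sketch (placeholder values, not from the original source; vf is a
// Sesame ValueFactory as in the performAdd sketch above):
// impl.performRemove(null,
//         vf.createURI("http://example.org/s"),
//         vf.createURI("http://example.org/p"),
//         vf.createLiteral("object value"),
//         null);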
/**
* clears triples from named graph
*
* @param tx
* @param contexts
*/
public void performClear(Transaction tx, Resource... contexts) {
if(notNull(contexts)) {
for (int i = 0; i < contexts.length; i++) {
if (notNull(contexts[i])) {
graphManager.delete(contexts[i].stringValue(), tx);
} else {
graphManager.delete(DEFAULT_GRAPH_URI, tx);
}
}
}else{
graphManager.delete(DEFAULT_GRAPH_URI, tx);
}
}
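// Illustrative usage sketch (placeholder graph IRI, not from the original source):
// impl.performClear(null, new URIImpl("http://example.org/graph1"));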
/**
* clears all triples
*
* @param tx
*/
public void performClearAll(Transaction tx) {
graphManager.deleteGraphs();
}
/**
* getter rulesets
*
* @return the configured SPARQLRulesets, or null if none are set
*/
public SPARQLRuleset[] getRulesets() {
return this.ruleset;
}
/**
* setter for rulesets, filters out nulls
*
* @param rulesets
*/
public void setRulesets(SPARQLRuleset ... rulesets) {
if (notNull(rulesets)) {
List<SPARQLRuleset> list = new ArrayList<>();
for (SPARQLRuleset r : rulesets) {
if (r != null) {
list.add(r);
}
}
this.ruleset = list.toArray(new SPARQLRuleset[list.size()]);
} else {
this.ruleset = null;
}
}
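// Illustrative usage sketch (not from the original source; assumes the standard
// com.marklogic.client.semantics.SPARQLRuleset constants, eg. RDFS_PLUS_FULL):
// impl.setRulesets(SPARQLRuleset.RDFS_PLUS_FULL);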
/**
* setter for graph permissions
*
* @param graphPerms
*/
public void setGraphPerms(GraphPermissions graphPerms) {
this.graphPerms = graphPerms;
}
/**
* getter for graph permissions
*
* @return the configured GraphPermissions
*/
public GraphPermissions getGraphPerms() {
return this.graphPerms;
}
/**
* setter for ConstrainingQueryDefinition
*
* @param constrainingQueryDefinition
*/
public void setConstrainingQueryDefinition(QueryDefinition constrainingQueryDefinition) {
this.constrainingQueryDef = constrainingQueryDefinition;
}
/**
* getter for ConstrainingQueryDefinition
*
* @return the constraining QueryDefinition
*/
public QueryDefinition getConstrainingQueryDefinition() {
return this.constrainingQueryDef;
}
///////////////////////////////////////////////////////////////////////////////////////////////
/**
* converts Sesame BindingSet to java api client SPARQLBindings
*
* @param bindings
* @return SPARQLBindings built from the Sesame BindingSet
*/
protected SPARQLBindings getSPARQLBindings(SPARQLQueryBindingSet bindings) {
SPARQLBindings sps = new SPARQLBindingsImpl();
for (Binding binding : bindings) {
sps.bind(binding.getName(), binding.getValue().stringValue());
}
return sps;
}
///////////////////////////////////////////////////////////////////////////////////////////////
/**
* bind object
*
* @param qdef
* @param variableName
* @param object
* @return the SPARQLQueryDefinition with the object binding applied
* @throws MarkLogicSesameException
*/
private static SPARQLQueryDefinition bindObject(SPARQLQueryDefinition qdef, String variableName, Value object) throws MarkLogicSesameException{
SPARQLBindings bindings = qdef.getBindings();
if(object != null){
if (object instanceof URI) {
bindings.bind(variableName, object.stringValue());
} else if (object instanceof Literal) {
Literal lit = (Literal) object;
if (lit.getLanguage() != null) {
String languageTag = lit.getLanguage();
bindings.bind(variableName, lit.getLabel(), Locale.forLanguageTag(languageTag));
} else if (lit.getDatatype() != null) {
try {
String xsdType = lit.getDatatype().toString();
String fragment = new java.net.URI(xsdType).getFragment();
bindings.bind(variableName,lit.getLabel(),RDFTypes.valueOf(fragment.toUpperCase()));
} catch (URISyntaxException e) {
throw new MarkLogicSesameException("Problem with object datatype.");
}
}else {
// assume we have a string value
bindings.bind(variableName, lit.getLabel(), RDFTypes.STRING);
}
}
qdef.setBindings(bindings);
}
return qdef;
}
/**
* tedious utility for checking if object is null or not
*
* @param item
* @return true if item is not null, false otherwise
*/
private static boolean notNull(Object item) {
return item != null;
}
}