All Downloads are FREE. Search and download functionalities are using the official Maven repository.

com.marklogic.semantics.sesame.overview.html Maven / Gradle / Ivy

There is a newer version: 1.0.2.1
Show newest version

MarkLogic Sesame API Overview

MarkLogic Sesame

marklogic-sesame enables applications based on Sesame 2.8.6 to use MarkLogic as a persistence layer for triples, and as a source to query with SPARQL. In addition to basic support for graph CRUD and SPARQL query and update, marklogic-sesame also exposes the following MarkLogic capabilities to the Sesame framework:

  • Transactions
  • Variable bindings
  • Inference (ruleset configuration)
  • Combined MarkLogic and SPARQL queries
  • Optimized pagination of SPARQL result sets
  • Permissions

Before Starting

Ensure that you have the following information available for a MarkLogic instance:

  • hostname
  • port of an application server
  • credentials to read/write/administer the database as needed
If you need something to help you configure and deploy MarkLogic application servers, try ml-gradle. Note: if you are starting from a fresh MarkLogic 8.0-4 installation on your local machine, the out-of-the-box ml-gradle configuration will set up a test server for you.

The API

Sesame uses a {@link org.openrdf.repository.Repository} to represent a quad store and {@link org.openrdf.repository.RepositoryConnection} to connect to a quad store.

import com.marklogic.semantics.sesame.MarkLogicRepository;
import com.marklogic.semantics.sesame.MarkLogicRepositoryConnection;
import com.marklogic.semantics.sesame.query.MarkLogicTupleQuery;
import org.openrdf.model.Resource;
import org.openrdf.model.URI;
import org.openrdf.model.ValueFactory;
import org.openrdf.model.vocabulary.FOAF;
import org.openrdf.model.vocabulary.RDF;
import org.openrdf.model.vocabulary.RDFS;
import org.openrdf.model.vocabulary.XMLSchema;
import org.openrdf.query.*;
import org.openrdf.repository.RepositoryException;
import org.openrdf.rio.RDFFormat;
import org.openrdf.rio.RDFParseException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.File;
import java.io.IOException;

public class Example1_Simple {

    protected static Logger logger = LoggerFactory.getLogger(Example1_Simple.class);

    /**
     * Demonstrates basic marklogic-sesame usage end to end: connect to a
     * MarkLogic-backed repository, load triples from an RDF/XML file, add a
     * few constructed statements into named graphs, run an ASK query and a
     * paginated SELECT query, then clear the repository and shut down.
     *
     * @param args unused
     * @throws RepositoryException      on connection/repository failures
     * @throws IOException              if the input file cannot be read
     * @throws RDFParseException        if the input file is not valid RDF/XML
     * @throws MalformedQueryException  if a SPARQL string fails to parse
     * @throws QueryEvaluationException if query evaluation fails
     */
    public static void main(String... args) throws RepositoryException, IOException, RDFParseException, MalformedQueryException, QueryEvaluationException {

        // instantiate repository against a local MarkLogic application server
        MarkLogicRepository repo = new MarkLogicRepository("localhost", 8200, "admin", "admin", "DIGEST");
        repo.initialize();

        // get repository connection
        MarkLogicRepositoryConnection conn = repo.getConnection();

        // return number of triples contained in repository
        logger.info("number of triples: {}", conn.size());

        // add triples from a file (null base URI; null context = default graph)
        File inputFile = new File("src/main/resources/testdata/test-small.owl");
        conn.add(inputFile, null, RDFFormat.RDFXML, (Resource) null);

        logger.info("number of triples: {}", conn.size());

        // add a few constructed triples, each into its own named graph (context)
        Resource context1 = conn.getValueFactory().createURI("http://marklogic.com/examples/context1");
        Resource context2 = conn.getValueFactory().createURI("http://marklogic.com/examples/context2");
        ValueFactory f = conn.getValueFactory();
        String namespace = "http://example.org/";
        URI john = f.createURI(namespace, "john");
        conn.add(john, RDF.TYPE, FOAF.PERSON, context1);
        conn.add(john, RDFS.LABEL, f.createLiteral("John", XMLSchema.STRING), context2);

        // check if triples with subject john exist in repository.
        // NOTE(review): the original page lost the subject IRI (angle brackets
        // stripped by HTML extraction), leaving invalid SPARQL; reconstructed
        // to ask about the john resource created above.
        String checkJohnQuery = "ASK { <http://example.org/john> ?p ?o .}";
        BooleanQuery booleanJohnQuery = conn.prepareBooleanQuery(QueryLanguage.SPARQL, checkJohnQuery);
        logger.info("result of query: {}", booleanJohnQuery.evaluate());

        // perform SPARQL query with pagination
        String queryString = "select * { ?s ?p ?o }";
        MarkLogicTupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
        tupleQuery.setIncludeInferred(true);
        // evaluate(start, pageLength): fetch results 1 through 10
        TupleQueryResult results = tupleQuery.evaluate(1, 10);

        // iterate through query results
        while (results.hasNext()) {
            BindingSet bindings = results.next();
            logger.info("subject:{}", bindings.getValue("s"));
            logger.info("predicate:{}", bindings.getValue("p"));
            logger.info("object:{}", bindings.getValue("o"));
        }

        // clear all triples
        conn.clear();
        logger.info("number of triples: {}", conn.size());

        // close connection and shutdown repository
        conn.close();
        repo.shutDown();
    }
}

Add and Remove Triples Operation

Use Sesame's CRUD operations to store, retrieve, merge, or delete triples.

// Create two named-graph (context) identifiers and a subject URI.
Resource context1 = conn.getValueFactory().createURI("http://marklogic.com/examples/context1");
Resource context2 = conn.getValueFactory().createURI("http://marklogic.com/examples/context2");
ValueFactory f = conn.getValueFactory();
String namespace = "http://example.org/";
URI john = f.createURI(namespace, "john");
// Add two statements inside an explicit transaction.
conn.begin();
conn.add(john, RDF.TYPE, FOAF.PERSON, context1);
conn.add(john, RDFS.LABEL, f.createLiteral("John", XMLSchema.STRING), context2);
// fixed: the original line had a doubled comma (",,") which does not compile
logger.info("statement exist:{}", conn.hasStatement(john, RDF.TYPE, null, context1));
logger.info("total triples:{}", conn.size());
conn.commit();
// Remove by pattern (null acts as a wildcard), then clear everything.
conn.remove(john, RDF.TYPE, null, context1);
conn.remove(john, null, null, context1);
conn.clear();
logger.info("total triples:{} should be zero", conn.size());


SPARQL Queries

Perform SPARQL query with pagination.

// perform SPARQL query
String queryString = "select * { ?s ?p ?o }";
MarkLogicTupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);

// enable rulesets set on MarkLogic database
tupleQuery.setIncludeInferred(true);

// set base uri for resolving relative uris
tupleQuery.setBaseURI("http://www.example.org/base/");

// set rulesets for infererencing
tupleQuery.setRulesets(SPARQLRuleset.ALL_VALUES_FROM, SPARQLRuleset.HAS_VALUE);

// set a combined query: a MarkLogic structured search (here matching all
// documents via qtext "*") that constrains which documents the SPARQL
// query may draw triples from.
// NOTE(review): qmgr is a MarkLogic QueryManager obtained elsewhere —
// not shown in this snippet.
String combinedQuery =
        "{\"search\":" +
                "{\"qtext\":\"*\"}}";
RawCombinedQueryDefinition rawCombined = qmgr.newRawCombinedQueryDefinition(new StringHandle().with(combinedQuery).withFormat(Format.JSON));
tupleQuery.setConstrainingQueryDefinition(rawCombined);

// evaluate query with pagination: evaluate(start, pageLength) returns
// results 1 through 10
TupleQueryResult results = tupleQuery.evaluate(1,10);

// iterate through query results
while(results.hasNext()){
    BindingSet bindings = results.next();
    logger.info("subject:{}",bindings.getValue("s"));
    logger.info("predicate:{}", bindings.getValue("p"));
    logger.info("object:{}", bindings.getValue("o"));
}

SPARQL Update

// update query
// NOTE(review): the graph IRI and the triple inside GRAPH { } were stripped
// during HTML extraction, leaving invalid SPARQL; reconstructed here with
// placeholder IRIs consistent with the other examples — confirm against the
// original documentation.
String updatequery = "INSERT DATA { GRAPH <http://marklogic.com/test/context> { <http://example.org/john> <http://example.org/says> \"hello\" } }";
MarkLogicUpdateQuery updateQuery = conn.prepareUpdate(QueryLanguage.SPARQL, updatequery, "http://marklogic.com/test/baseuri");

// set perms to be applied to data
// NOTE(review): gmgr is a MarkLogic GraphManager obtained elsewhere — not
// shown in this snippet.
updateQuery.setGraphPerms(gmgr.permission("admin", Capability.READ).permission("admin", Capability.EXECUTE));

try {
    updateQuery.execute();
} catch (UpdateExecutionException e) {
    e.printStackTrace();
}





© 2015 - 2024 Weber Informatics LLC | Privacy Policy