/**

Copyright (C) SYSTAP, LLC DBA Blazegraph 2006-2016.  All rights reserved.

Contact:
     SYSTAP, LLC DBA Blazegraph
     2501 Calvert ST NW #106
     Washington, DC 20008
     licenses@blazegraph.com

This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; version 2 of the License.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA  02111-1307  USA
*/
/*
 * Created on Sep 4, 2011
 */

package com.bigdata.rdf.sparql.ast.eval;

import java.util.LinkedList;
import java.util.List;
import java.util.Properties;

import junit.framework.TestResult;
import junit.framework.TestSuite;

import com.bigdata.bop.BOpUtility;
import com.bigdata.bop.Constant;
import com.bigdata.bop.IBindingSet;
import com.bigdata.bop.IConstant;
import com.bigdata.bop.IVariable;
import com.bigdata.bop.PipelineOp;
import com.bigdata.bop.Var;
import com.bigdata.bop.bindingSet.ListBindingSet;
import com.bigdata.bop.engine.QueryEngine;
import com.bigdata.bop.fed.QueryEngineFactory;
import com.bigdata.bop.join.HashIndexOp;
import com.bigdata.bop.join.NestedLoopJoinOp;
import com.bigdata.bop.join.PipelineJoin;
import com.bigdata.bop.join.SolutionSetHashJoinOp;
import com.bigdata.bop.solutions.ProjectionOp;
import com.bigdata.bop.solutions.SliceOp;
import com.bigdata.journal.BufferMode;
import com.bigdata.journal.IBTreeManager;
import com.bigdata.rdf.internal.impl.literal.XSDNumericIV;
import com.bigdata.rdf.model.BigdataLiteral;
import com.bigdata.rdf.model.BigdataURI;
import com.bigdata.rdf.model.BigdataValue;
import com.bigdata.rdf.model.BigdataValueFactory;
import com.bigdata.rdf.sparql.ast.ASTContainer;
import com.bigdata.rdf.sparql.ast.NamedSubqueryRoot;
import com.bigdata.rdf.sparql.ast.eval.TestTCK.TCKStressTests;
import com.bigdata.rdf.sparql.ast.ssets.ISolutionSetManager;
import com.bigdata.rdf.sparql.ast.ssets.SolutionSetManager;
import com.bigdata.rdf.store.AbstractTripleStore;
import com.bigdata.rwstore.IRWStrategy;
import com.bigdata.rwstore.sector.MemStore;

/**
 * Data driven test suite for INCLUDE of named solution sets NOT generated by a
 * {@link NamedSubqueryRoot}. This test suite examines several details,
 * including the ability to locate and join with a pre-existing named solution
 * set, the ability to deliver the named solution set in order, and the choice
 * of physical join operator (solution set SCAN versus hash index join) used to
 * evaluate the INCLUDE.
 * 
 * @author Bryan Thompson
 * @version $Id: TestNamedSubQuery.java 6080 2012-03-07 18:38:55Z thompsonbry $
 */
public class TestInclude extends AbstractDataDrivenSPARQLTestCase {

    /**
     *
     */
    public TestInclude() {
    }

    /**
     * @param name
     */
    public TestInclude(String name) {
        super(name);
    }
    
    /**
     * Overridden to force the use of the {@link MemStore} since the solution
     * set cache is only enabled for {@link IRWStrategy} instances.
     */
    @Override
    public Properties getProperties() {

        // Note: clone to avoid modifying!!!
        final Properties properties = (Properties) super.getProperties().clone();

        properties.setProperty(com.bigdata.journal.Options.BUFFER_MODE,
                BufferMode.MemStore.name());
        
        return properties;
        
    }
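
    /*
     * For orientation (a sketch, not exercised by these tests): the MemStore
     * journal selected above is what makes the solution set cache available,
     * because the cache is only enabled when the journal's backing strategy
     * implements IRWStrategy. Assuming the index manager is the Journal (the
     * variable names here are illustrative), the precondition could be
     * checked like this:
     *
     *   final com.bigdata.journal.Journal jnl =
     *           (com.bigdata.journal.Journal) tripleStore.getIndexManager();
     *   assertTrue(jnl.getBufferStrategy() instanceof IRWStrategy);
     */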

    protected <T> IConstant<T> asConst(final T val) {

        return new Constant<T>(val);
        
    }
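
    /*
     * Note on setup style: each test below materializes its named solution
     * set directly through the SolutionSetManager. The declarative route the
     * javadoc alludes to ("INSERT INTO SOLUTIONS") would instead go through
     * Blazegraph's SPARQL UPDATE extension for named solution sets; a sketch,
     * assuming that extension syntax:
     *
     *   INSERT INTO %solutionSet1
     *   SELECT ?x ?y WHERE { ... }
     *
     * after which a query may consume the cached solutions via
     * "INCLUDE %solutionSet1".
     */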
    
    /**
     * This test populates a named solution set and then examines the ability
     * to deliver a SLICE of that named solution set in the same order in which
     * the data were stored. Normally, the named solution set would be created
     * using INSERT INTO SOLUTIONS, but in this case we drop down a level and
     * handle the creation of the named solution set in the test setup.
     * 
     * <pre>
     * SELECT ?x ?y WHERE { 
     * 
     *    # Turn off the join order optimizer.
     *    hint:Query hint:optimizer "None" .
     * 
     *    # Run joins in the given order (INCLUDE is 2nd).
     *    
     *    # bind x => {Mike;Bryan}
     *    ?x rdf:type foaf:Person .
     *    
     *    # join on (x) => {(x=Mike,y=2);(x=Bryan,y=4)} 
     *    INCLUDE %solutionSet1 .
     * 
     * }
     * </pre>
     */
    public void test_include_02() throws Exception {

        final TestHelper testHelper = new TestHelper(
                "include_02",// name
                "include_02.rq",// query URL
                "include_02.trig",// data URL
                "include_02.srx",// results URL
                true // check order(!)
                );

        final AbstractTripleStore tripleStore = testHelper.getTripleStore();

        final BigdataValueFactory vf = tripleStore.getValueFactory();

        final QueryEngine queryEngine = QueryEngineFactory.getInstance()
                .getQueryController(tripleStore.getIndexManager());

        final ISolutionSetManager sparqlCache = new SolutionSetManager(
                (IBTreeManager) queryEngine.getIndexManager(),
                tripleStore.getNamespace(), tripleStore.getTimestamp());

        final String solutionSet = "%solutionSet1";

        final IVariable<?> x = Var.var("x");
        final IVariable<?> y = Var.var("y");
        final IVariable<?> z = Var.var("z");

        final XSDNumericIV<BigdataLiteral> one = new XSDNumericIV<BigdataLiteral>(1);
        one.setValue(vf.createLiteral(1));

        final XSDNumericIV<BigdataLiteral> two = new XSDNumericIV<BigdataLiteral>(2);
//        two.setValue(vf.createLiteral(2));

        final XSDNumericIV<BigdataLiteral> three = new XSDNumericIV<BigdataLiteral>(3);
//        three.setValue(vf.createLiteral(3));

        final XSDNumericIV<BigdataLiteral> four = new XSDNumericIV<BigdataLiteral>(4);
        four.setValue(vf.createLiteral(4));

        final XSDNumericIV<BigdataLiteral> five = new XSDNumericIV<BigdataLiteral>(5);
        five.setValue(vf.createLiteral(5));

        final List<IBindingSet> bsets = new LinkedList<IBindingSet>();
        {
            final IBindingSet bset = new ListBindingSet();
            bset.set(x, asConst(one));
            bset.set(y, asConst(two));
            bsets.add(bset);
        }
        {
            // Empty solution (no bindings).
            final IBindingSet bset = new ListBindingSet();
            bsets.add(bset);
        }
        {
            final IBindingSet bset = new ListBindingSet();
            bset.set(x, asConst(three));
            bset.set(y, asConst(four));
            bset.set(z, asConst(five));
            bsets.add(bset);
        }

        final IBindingSet[] bindingSets = bsets.toArray(new IBindingSet[]{});

        sparqlCache.putSolutions(solutionSet,
                BOpUtility.asIterator(bindingSets));

        final ASTContainer astContainer = testHelper.runTest();

        final PipelineOp queryPlan = astContainer.getQueryPlan();

        // top level should be the SLICE operator.
        assertTrue(queryPlan instanceof SliceOp);

        // sole argument should be the PROJECTION operator.
        final PipelineOp projectionOp = (PipelineOp) queryPlan.get(0);
        assertTrue(projectionOp instanceof ProjectionOp);

        // sole argument should be the INCLUDE operator.
        final PipelineOp includeOp = (PipelineOp) projectionOp.get(0);

        // the INCLUDE should be evaluated using a solution set SCAN.
        assertTrue(includeOp instanceof NestedLoopJoinOp);

    }

    /**
     * A unit test for an INCLUDE with another JOIN. For this test, the INCLUDE
     * will run first:
     * 
     * <pre>
     * %solutionSet1::
     * {x=:Mike,  y=2}
     * {x=:Bryan, y=4}
     * {x=:DC,    y=1}
     * </pre>
     * 
     * <pre>
     * prefix :     <http://www.bigdata.com/> 
     * prefix rdf:  <http://www.w3.org/1999/02/22-rdf-syntax-ns#> 
     * prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> 
     * prefix foaf: <http://xmlns.com/foaf/0.1/> 
     * 
     * SELECT ?x ?y WHERE { 
     * 
     *    # Turn off the join order optimizer.
     *    hint:Query hint:optimizer "None" .
     * 
     *    # Run joins in the given order (INCLUDE is 1st).
     *    
     *    # SCAN => {(x=Mike,y=2);(x=Bryan,y=4);(x=DC,y=1)} 
     *    INCLUDE %solutionSet1 .
     * 
     *    # JOIN on (x) => {(x=Mike,y=2);(x=Bryan,y=4)}
     *    ?x rdf:type foaf:Person .
     *    
     * }
     * </pre>
     * 
     * Note: This exercises the code path in {@link AST2BOpUtility} where we do
     * a SCAN on the named solution set for the INCLUDE and then join with the
     * access path.
     * 
     * @see #test_include_03()
     * 
     * @see "SPARQL UPDATE for NAMED SOLUTION SETS"
     */
    public void test_include_03a() throws Exception {

        final TestHelper testHelper = new TestHelper(
                "include_03a",// name
                "include_03a.rq",// query URL
                "include_03.trig",// data URL
                "include_03.srx",// results URL
                false // check order
                );

        final AbstractTripleStore tripleStore = testHelper.getTripleStore();

        final BigdataValueFactory vf = tripleStore.getValueFactory();

        final QueryEngine queryEngine = QueryEngineFactory.getInstance()
                .getQueryController(tripleStore.getIndexManager());

        final ISolutionSetManager sparqlCache = new SolutionSetManager(
                (IBTreeManager) queryEngine.getIndexManager(),
                tripleStore.getNamespace(), tripleStore.getTimestamp());

        final String solutionSet = "%solutionSet1";

        final IVariable<?> x = Var.var("x");
        final IVariable<?> y = Var.var("y");

        // Resolve terms pre-loaded into the kb.
        final BigdataURI Mike = vf.createURI("http://www.bigdata.com/Mike");
        final BigdataURI Bryan = vf.createURI("http://www.bigdata.com/Bryan");
        final BigdataURI DC = vf.createURI("http://www.bigdata.com/DC");
        {
            tripleStore.addTerms(new BigdataValue[] { Mike, Bryan, DC });
            assertNotNull(Mike.getIV());
            assertNotNull(Bryan.getIV());
            assertNotNull(DC.getIV());
        }

        final XSDNumericIV<BigdataLiteral> one = new XSDNumericIV<BigdataLiteral>(1);
        one.setValue(vf.createLiteral(1));

        final XSDNumericIV<BigdataLiteral> two = new XSDNumericIV<BigdataLiteral>(2);
        two.setValue(vf.createLiteral(2));

//        final XSDNumericIV<BigdataLiteral> three = new XSDNumericIV<BigdataLiteral>(3);
//        three.setValue(vf.createLiteral(3));

        final XSDNumericIV<BigdataLiteral> four = new XSDNumericIV<BigdataLiteral>(4);
        four.setValue(vf.createLiteral(4));

//        final XSDNumericIV<BigdataLiteral> five = new XSDNumericIV<BigdataLiteral>(5);
//        five.setValue(vf.createLiteral(5));

        final List<IBindingSet> bsets = new LinkedList<IBindingSet>();
        {
            final IBindingSet bset = new ListBindingSet();
            bset.set(x, asConst(Mike.getIV()));
            bset.set(y, asConst(two));
            bsets.add(bset);
        }
        {
            final IBindingSet bset = new ListBindingSet();
            bset.set(x, asConst(Bryan.getIV()));
            bset.set(y, asConst(four));
            bsets.add(bset);
        }
        {
            final IBindingSet bset = new ListBindingSet();
            bset.set(x, asConst(DC.getIV()));
            bset.set(y, asConst(one));
            bsets.add(bset);
        }

        final IBindingSet[] bindingSets = bsets.toArray(new IBindingSet[]{});

        sparqlCache.putSolutions(solutionSet,
                BOpUtility.asIterator(bindingSets));

        final ASTContainer astContainer = testHelper.runTest();

        final PipelineOp queryPlan = astContainer.getQueryPlan();

        // top level should be the PROJECTION operator.
        final PipelineOp projectionOp = (PipelineOp) queryPlan;
        assertTrue(projectionOp instanceof ProjectionOp);

        // sole argument should be the PIPELINE JOIN operator.
        final PipelineOp joinOp = (PipelineOp) projectionOp.get(0);
        assertTrue(joinOp instanceof PipelineJoin);

        /*
         * The sole argument of JOIN should be the INCLUDE operator, which
         * should be evaluated using a solution set SCAN. This is where we
         * start evaluation for this query.
         */
        final PipelineOp includeOp = (PipelineOp) joinOp.get(0);
        assertTrue(includeOp instanceof NestedLoopJoinOp);

    }

    /**
     * A unit test for an INCLUDE which is NOT the first JOIN in the WHERE
     * clause. This condition is enforced by turning off the join order
     * optimizer for this query.
     * <p>
     * Note: Since there is another JOIN in this query, there is no longer any
     * order guarantee for the resulting solutions.
     * 
     * <pre>
     * %solutionSet1::
     * {x=:Mike,  y=2}
     * {x=:Bryan, y=4}
     * {x=:DC,    y=1}
     * </pre>
     * 
     * <pre>
     * prefix :     <http://www.bigdata.com/> 
     * prefix rdf:  <http://www.w3.org/1999/02/22-rdf-syntax-ns#> 
     * prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> 
     * prefix foaf: <http://xmlns.com/foaf/0.1/> 
     * 
     * SELECT ?x ?y WHERE { 
     * 
     *    # Turn off the join order optimizer.
     *    hint:Query hint:optimizer "None" .
     * 
     *    # Run joins in the given order (INCLUDE is 2nd).
     *    
     *    # bind x => {Mike;Bryan}
     *    ?x rdf:type foaf:Person .
     *    
     *    # join on (x) => {(x=Mike,y=2);(x=Bryan,y=4)} 
     *    INCLUDE %solutionSet1 .
     * 
     * }
     * </pre>
     * 
     * @see #test_include_03a()
     * 
     * @see "SPARQL UPDATE for NAMED SOLUTION SETS"
     */
    public void test_include_03() throws Exception {

        final TestHelper testHelper = new TestHelper(
                "include_03",// name
                "include_03.rq",// query URL
                "include_03.trig",// data URL
                "include_03.srx",// results URL
                false // check order
                );

        final AbstractTripleStore tripleStore = testHelper.getTripleStore();

        final BigdataValueFactory vf = tripleStore.getValueFactory();

        final QueryEngine queryEngine = QueryEngineFactory.getInstance()
                .getQueryController(tripleStore.getIndexManager());

        final ISolutionSetManager sparqlCache = new SolutionSetManager(
                (IBTreeManager) queryEngine.getIndexManager(),
                tripleStore.getNamespace(), tripleStore.getTimestamp());

        final String solutionSet = "%solutionSet1";

        final IVariable<?> x = Var.var("x");
        final IVariable<?> y = Var.var("y");

        // Resolve terms pre-loaded into the kb.
        final BigdataURI Mike = vf.createURI("http://www.bigdata.com/Mike");
        final BigdataURI Bryan = vf.createURI("http://www.bigdata.com/Bryan");
        final BigdataURI DC = vf.createURI("http://www.bigdata.com/DC");
        {
            tripleStore.addTerms(new BigdataValue[] { Mike, Bryan, DC });
            assertNotNull(Mike.getIV());
            assertNotNull(Bryan.getIV());
            assertNotNull(DC.getIV());
        }

        final XSDNumericIV<BigdataLiteral> one = new XSDNumericIV<BigdataLiteral>(1);
        one.setValue(vf.createLiteral(1));

        final XSDNumericIV<BigdataLiteral> two = new XSDNumericIV<BigdataLiteral>(2);
        two.setValue(vf.createLiteral(2));

//        final XSDNumericIV<BigdataLiteral> three = new XSDNumericIV<BigdataLiteral>(3);
//        three.setValue(vf.createLiteral(3));

        final XSDNumericIV<BigdataLiteral> four = new XSDNumericIV<BigdataLiteral>(4);
        four.setValue(vf.createLiteral(4));

//        final XSDNumericIV<BigdataLiteral> five = new XSDNumericIV<BigdataLiteral>(5);
//        five.setValue(vf.createLiteral(5));

        /**
         * <pre>
         * %solutionSet1::
         * {x=:Mike,  y=2}
         * {x=:Bryan, y=4}
         * {x=:DC,    y=1}
         * </pre>
         */
        final List<IBindingSet> bsets = new LinkedList<IBindingSet>();
        {
            final IBindingSet bset = new ListBindingSet();
            bset.set(x, asConst(Mike.getIV()));
            bset.set(y, asConst(two));
            bsets.add(bset);
        }
        {
            final IBindingSet bset = new ListBindingSet();
            bset.set(x, asConst(Bryan.getIV()));
            bset.set(y, asConst(four));
            bsets.add(bset);
        }
        {
            final IBindingSet bset = new ListBindingSet();
            bset.set(x, asConst(DC.getIV()));
            bset.set(y, asConst(one));
            bsets.add(bset);
        }

        final IBindingSet[] bindingSets = bsets.toArray(new IBindingSet[]{});

        sparqlCache.putSolutions(solutionSet,
                BOpUtility.asIterator(bindingSets));

        final ASTContainer astContainer = testHelper.runTest();

        /**
         * The plan should be:
         * 
         * 1. A PipelineJoin for the initial triple pattern.
         * 
         * 2. A HashIndexOp to generate an appropriate index for the join with
         * the solution set. (The main point of this test is to verify that we
         * build an appropriate hash index to do the join rather than using a
         * NestedLoopJoinOp.)
         * 
         * 3. A SolutionSetHashJoin.
         * 
         * 4. A ProjectionOp.
         */
//        com.bigdata.bop.solutions.ProjectionOp[6](JVMSolutionSetHashJoinOp[5])[ com.bigdata.bop.BOp.bopId=6, com.bigdata.bop.BOp.evaluationContext=CONTROLLER, com.bigdata.bop.PipelineOp.sharedState=true, com.bigdata.bop.join.JoinAnnotations.select=[x, y], com.bigdata.bop.engine.QueryEngine.queryId=562dbadb-afcc-4a2c-bf70-2486f1061dc3]
//          com.bigdata.bop.join.JVMSolutionSetHashJoinOp[5](JVMHashIndexOp[4])[ com.bigdata.bop.BOp.bopId=5, com.bigdata.bop.BOp.evaluationContext=CONTROLLER, com.bigdata.bop.PipelineOp.sharedState=true, namedSetRef=NamedSolutionSetRef{queryId=562dbadb-afcc-4a2c-bf70-2486f1061dc3,namedSet=%solutionSet1,joinVars=[x]}, com.bigdata.bop.join.JoinAnnotations.constraints=null, class com.bigdata.bop.join.SolutionSetHashJoinOp.release=false]
//            com.bigdata.bop.join.JVMHashIndexOp[4](PipelineJoin[3])[ com.bigdata.bop.BOp.bopId=4, com.bigdata.bop.BOp.evaluationContext=CONTROLLER, com.bigdata.bop.PipelineOp.maxParallel=1, com.bigdata.bop.PipelineOp.lastPass=true, com.bigdata.bop.PipelineOp.sharedState=true, com.bigdata.bop.join.JoinAnnotations.joinType=Normal, com.bigdata.bop.join.HashJoinAnnotations.joinVars=[x], com.bigdata.bop.join.JoinAnnotations.select=null, namedSetSourceRef=NamedSolutionSetRef{queryId=562dbadb-afcc-4a2c-bf70-2486f1061dc3,namedSet=%solutionSet1,joinVars=[]}, namedSetRef=NamedSolutionSetRef{queryId=562dbadb-afcc-4a2c-bf70-2486f1061dc3,namedSet=%solutionSet1,joinVars=[x]}]
//              com.bigdata.bop.join.PipelineJoin[3]()[ com.bigdata.bop.BOp.bopId=3, com.bigdata.bop.join.JoinAnnotations.constraints=null, com.bigdata.bop.BOp.evaluationContext=ANY, com.bigdata.bop.join.AccessPathJoinAnnotations.predicate=SPOPredicate[1]]

        final PipelineOp queryPlan = astContainer.getQueryPlan();

        // top level should be the PROJECTION operator.
        assertTrue(queryPlan instanceof ProjectionOp);

        // sole argument should be the SOLUTION SET HASH JOIN operator.
        final PipelineOp solutionSetHashJoinOp = (PipelineOp) queryPlan.get(0);
        assertTrue(solutionSetHashJoinOp instanceof SolutionSetHashJoinOp);

        // sole argument should be the HASH INDEX BUILD operator.
        final PipelineOp hashIndexOp = (PipelineOp) solutionSetHashJoinOp.get(0);
        assertTrue(hashIndexOp instanceof HashIndexOp);

        // sole argument should be the PIPELINE JOIN (triple pattern).
        final PipelineOp pipelineJoinOp = (PipelineOp) hashIndexOp.get(0);
        assertTrue(pipelineJoinOp instanceof PipelineJoin);

    }

    /**
     * Execute the stress tests repeatedly (100 passes of the suite).
     * 
     * @throws Exception
     */
    public void test_stressTests() throws Exception {

        for (int i = 0; i < 100; i++) {

            final TestSuite suite = new TestSuite(
                    IncludeStressTests.class.getSimpleName());

            suite.addTestSuite(IncludeStressTests.class);

            suite.run(new TestResult());

        }

    }

    /**
     * Tests to be executed in a stress test fashion, i.e. multiple times.
     * 
     * @author msc
     */
    public static class IncludeStressTests extends
            AbstractDataDrivenSPARQLTestCase {

        /**
         *
         */
        public IncludeStressTests() {
        }

        /**
         * @param name
         */
        public IncludeStressTests(String name) {
            super(name);
        }

        /**
         * This test populates a named solution set and then examines the
         * ability to deliver that named solution set in the same order in
         * which the data were stored. Normally, the named solution set would
         * be created using INSERT INTO SOLUTIONS, but in this case we drop
         * down a level and handle the creation of the named solution set in
         * the test setup.
         * 
         * <pre>
         * SELECT * WHERE { INCLUDE %solutionSet1 }
         * </pre>
         */
        public void test_include_01() throws Exception {

            final TestHelper testHelper = new TestHelper("include_01",// name
                    "include_01.rq",// query URL
                    "include_01.trig",// data URL
                    "include_01.srx",// results URL
                    true // check order(!)
            );

            final AbstractTripleStore tripleStore = testHelper
                    .getTripleStore();

            final BigdataValueFactory vf = tripleStore.getValueFactory();

            final QueryEngine queryEngine = QueryEngineFactory.getInstance()
                    .getQueryController(tripleStore.getIndexManager());

            final ISolutionSetManager sparqlCache = new SolutionSetManager(
                    (IBTreeManager) queryEngine.getIndexManager(),
                    tripleStore.getNamespace(), tripleStore.getTimestamp());

            final String solutionSet = "%solutionSet1";

            final IVariable<?> x = Var.var("x");
            final IVariable<?> y = Var.var("y");
            final IVariable<?> z = Var.var("z");

            final XSDNumericIV<BigdataLiteral> one = new XSDNumericIV<BigdataLiteral>(1);
            one.setValue(vf.createLiteral(1));

            final XSDNumericIV<BigdataLiteral> two = new XSDNumericIV<BigdataLiteral>(2);

            final XSDNumericIV<BigdataLiteral> three = new XSDNumericIV<BigdataLiteral>(3);

            final XSDNumericIV<BigdataLiteral> four = new XSDNumericIV<BigdataLiteral>(4);
            four.setValue(vf.createLiteral(4));

            final XSDNumericIV<BigdataLiteral> five = new XSDNumericIV<BigdataLiteral>(5);
            five.setValue(vf.createLiteral(5));

            final List<IBindingSet> bsets = new LinkedList<IBindingSet>();
            {
                final IBindingSet bset = new ListBindingSet();
                bset.set(x, asConst(one));
                bset.set(y, asConst(two));
                bsets.add(bset);
            }
            {
                // Empty solution (no bindings).
                final IBindingSet bset = new ListBindingSet();
                bsets.add(bset);
            }
            {
                final IBindingSet bset = new ListBindingSet();
                bset.set(x, asConst(three));
                bset.set(y, asConst(four));
                bset.set(z, asConst(five));
                bsets.add(bset);
            }

            final IBindingSet[] bindingSets = bsets
                    .toArray(new IBindingSet[] {});

            sparqlCache.putSolutions(solutionSet,
                    BOpUtility.asIterator(bindingSets));

            final ASTContainer astContainer = testHelper.runTest();

            final PipelineOp queryPlan = astContainer.getQueryPlan();

            // top level should be the PROJECTION operator.
            assertTrue(queryPlan instanceof ProjectionOp);

            // sole argument should be the INCLUDE operator.
            final PipelineOp includeOp = (PipelineOp) queryPlan.get(0);

            // the INCLUDE should be evaluated using a solution set SCAN.
            assertTrue(includeOp instanceof NestedLoopJoinOp);

        }

        protected <T> IConstant<T> asConst(final T val) {

            return new Constant<T>(val);

        }

    }

}
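
/*
 * A minimal sketch of running this suite standalone under JUnit 3 (matching
 * the junit.framework imports above); the runner invocation is illustrative
 * rather than part of the original test harness:
 *
 *   public static void main(final String[] args) {
 *       junit.textui.TestRunner.run(new TestSuite(TestInclude.class));
 *   }
 */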



