http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/92ddfa59/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/VarConstExternalProcessorTest.java
----------------------------------------------------------------------
diff --git 
a/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/VarConstExternalProcessorTest.java
 
b/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/VarConstExternalProcessorTest.java
new file mode 100644
index 0000000..b3638a5
--- /dev/null
+++ 
b/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/VarConstExternalProcessorTest.java
@@ -0,0 +1,470 @@
+package mvm.rya.indexing.external.tupleSet;
+
+import static org.junit.Assert.*;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Set;
+
+import mvm.rya.indexing.external.ExternalProcessor;
+import 
mvm.rya.indexing.external.tupleSet.ExternalProcessorTest.ExternalTupleVstor;
+
+import org.junit.Assert;
+import org.junit.Test;
+import org.openrdf.query.algebra.Projection;
+import org.openrdf.query.algebra.QueryModelNode;
+import org.openrdf.query.algebra.StatementPattern;
+import org.openrdf.query.algebra.TupleExpr;
+import org.openrdf.query.algebra.helpers.StatementPatternCollector;
+import org.openrdf.query.parser.ParsedQuery;
+import org.openrdf.query.parser.sparql.SPARQLParser;
+
+import com.google.common.collect.Sets;
+
public class VarConstExternalProcessorTest {

    // ------------------------------------------------------------------
    // SPARQL fixture strings.  Each test below parses one of these as the
    // "query" and one or more of the others as "indexes" (external tuple
    // sets), then verifies how ExternalProcessor substitutes the indexes
    // into the query.  Several queries mix variables with constants
    // ("3", "5", "wkt", ...) to exercise variable/constant matching.
    // NOTE(review): this block was reconstructed from a line-wrapped diff;
    // string literals were rejoined exactly as wrapped in the patch.
    // ------------------------------------------------------------------

    // Named-graph query with a comparison filter and a bound/sameTerm
    // filter.  NOTE(review): not referenced by any test in this class.
    String q15 = ""//
            + "SELECT ?a ?b ?c ?d ?e ?f ?q " //
            + "{" //
            + " GRAPH ?x { " //
            + "  ?a a ?b ."//
            + "  ?b <http://www.w3.org/2000/01/rdf-schema#label> ?c ."//
            + "  ?d <uri:talksTo> ?e . "//
            + "  FILTER ( ?e < ?f && (?a > ?b || ?c = ?d) ). " //
            + "  FILTER(bound(?f) && sameTerm(?a,?b)&&bound(?q)). " //
            + "  ?b a ?q ."//
            + "     }"//
            + "}";//

    // Index: named-graph query with a comparison filter; all-variable
    // counterpart to part of q19.
    String q17 = ""//
            + "SELECT ?j ?k ?l ?m ?n ?o " //
            + "{" //
            + " GRAPH ?z { " //
            + "  ?l a ?m. " //
            + "  ?n a ?o. " //
            + "  ?j <uri:talksTo> ?k . "//
            + "  FILTER ( ?k < ?l && (?m > ?n || ?o = ?j) ). " //
            + "     }"//
            + "}";//

    // Index: named-graph query with a bound/sameTerm filter; all-variable
    // counterpart to the remainder of q19.
    String q18 = ""//
            + "SELECT ?r ?s ?t ?u " //
            + "{" //
            + " GRAPH ?q { " //
            + "  FILTER(bound(?r) && sameTerm(?s,?t)&&bound(?u)). " //
            + "  ?t a ?u ."//
            + "  ?s a ?r ."//
            + "     }"//
            + "}";//

    // Query: variable/constant mix -- the constants "3" and "5" appear where
    // the indexes q17/q18 use variables.
    String q19 = ""//
            + "SELECT ?a ?c ?d ?f ?q " //
            + "{" //
            + " GRAPH ?x { " //
            + "  ?f a ?a ."//
            + " \"3\" a ?c . "//
            + "  ?d <uri:talksTo> \"5\" . "//
            + "  FILTER ( \"5\" < ?f && (?a > \"3\" || ?c = ?d) ). " //
            + "  FILTER(bound(?f) && sameTerm(?a,\"3\") && bound(?q)). " //
            + "  \"3\" a ?q ."//
            + "  ?a a ?f ."//
            + "     }"//
            + "}";//

    // Query: geospatial lookup with constant WKT arguments to geof:sfWithin.
    String q21 = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
            + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
            + "SELECT ?feature ?point " //
            + "{" //
            + "  ?feature a geo:Feature . "//
            + "  ?feature geo:hasGeometry ?point . "//
            + "  ?point a geo:Point . "//
            + "  ?point geo:asWKT \"wkt\" . "//
            + "  FILTER(geof:sfWithin(\"wkt\", \"Polygon\")) " //
            + "}";//

    // Query: two free-text filters over constant labels/names; matched twice
    // by index q24 in testFreeTestIndexFunction.
    String q22 = "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
            + "SELECT ?person " //
            + "{" //
            + "  ?person a <http://example.org/ontology/Person> . "//
            + "  ?person <http://www.w3.org/2000/01/rdf-schema#label> \"sally\" . "//
            + "  ?person <http://www.w3.org/2000/01/rdf-schema#label> \"john\" . "//
            + "  FILTER(fts:text(\"sally\", \"bob\")) . " //
            + "  FILTER(fts:text(\"john\", \"harry\"))  " //
            + "  ?person <uri:hasName> \"bob\". "//
            + "  ?person <uri:hasName> \"harry\". "//
            + "}";//

    // Index: all-variable geospatial pattern matching the constants in q21/q27.
    String q23 = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
            + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
            + "SELECT ?a ?b ?c ?d " //
            + "{" //
            + "  ?a a geo:Feature . "//
            + "  ?b a geo:Point . "//
            + "  ?b geo:asWKT ?c . "//
            + "  FILTER(geof:sfWithin(?c, ?d)) " //
            + "}";//

    // Index: all-variable free-text pattern matching the constants in q22/q25/q27.
    String q24 = "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
            + "SELECT ?f ?g ?h" //
            + "{" //
            + "  ?f <http://www.w3.org/2000/01/rdf-schema#label> ?g . "//
            + "  FILTER(fts:text(?g,?h)).  " //
            + " ?f <uri:hasName> ?h. " //
            + "}";//

    // Query: free-text and comparison filters over constant terms; used with
    // indexes q24 and q26 in testThreeIndexGeoFreeCompareFilterMix.
    String q25 = "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
            + "SELECT ?person ?point" //
            + "{" //
            + "  ?person <http://www.w3.org/2000/01/rdf-schema#label> \"label\" . "//
            + "  FILTER(fts:text(\"label\", \"bob\")) . " //
            + "  ?person <uri:hasName> \"bob\" . " //
            + "  ?person a ?point. " //
            + "  \"bob\" a <http://example.org/ontology/Person> . "//
            + "  ?person <http://www.w3.org/2000/01/rdf-schema#commentmatch> \"comment\" . "//
            + "  FILTER((?person > ?point) || (?person = \"comment\")). "
            + "  FILTER(fts:text(\"comment\", \"bob\"))  " //
            + "}";//

    // Index: all-variable comparison + free-text pattern matching part of q25/q27.
    String q26 = "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
            + "SELECT ?a ?b ?c ?d " //
            + "{" //
            + "  ?a a ?c. " //
            + "  ?d a <http://example.org/ontology/Person> . "//
            + "  ?a <http://www.w3.org/2000/01/rdf-schema#commentmatch> ?b . "//
            + "  FILTER((?a > ?c) || (?a = ?b)). "
            + "  FILTER(fts:text(?b, ?d)) . " //
            + "}";//

    // Query: combines the free-text/comparison content of q25 with the
    // geospatial content of q21; matched by indexes q23, q26 and q24.
    String q27 = "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
            + "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
            + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
            + "SELECT ?person ?feature ?point " //
            + "{" //
            + "  ?person <http://www.w3.org/2000/01/rdf-schema#label> \"label\" . "//
            + "  FILTER(fts:text(\"label\", \"bob\")) . " //
            + "  ?person <uri:hasName> \"bob\" . " //
            + "  ?person a ?point. " //
            + "  \"bob\" a <http://example.org/ontology/Person> . "//
            + "  ?person <http://www.w3.org/2000/01/rdf-schema#commentmatch> \"comment\" . "//
            + "  FILTER((?person > ?point) || (?person = \"comment\")). "
            + "  FILTER(fts:text(\"comment\", \"bob\"))  " //
            + "  ?feature a geo:Feature . "//
            + "  ?point a geo:Point . "//
            + "  ?point geo:asWKT \"wkt\" . "//
            + "  FILTER(geof:sfWithin(\"wkt\", \"Polygon\")) " //
            + "}";//

    // IN / NOT IN filter query.
    // NOTE(review): not referenced by any test in this class.
    String q28 = ""//
            + "SELECT ?m ?n " //
            + "{" //
            + "  FILTER(?m IN (1,2,3) && ?n NOT IN(5,6,7)). " //
            + "  ?n <http://www.w3.org/2000/01/rdf-schema#label> ?m. "//
            + "}";//

    /**
     * Processes query q19 against the all-variable indexes q17 and q18 and
     * verifies that the processed query contains exactly two
     * ExternalTupleSet nodes, each of whose statement patterns came from
     * the original query.
     */
    @Test
    public void testContextFilterFourIndex() throws Exception {

        SPARQLParser parser1 = new SPARQLParser();
        SPARQLParser parser3 = new SPARQLParser();
        SPARQLParser parser4 = new SPARQLParser();

        ParsedQuery pq1 = parser1.parseQuery(q19, null);
        ParsedQuery pq3 = parser3.parseQuery(q17, null);
        ParsedQuery pq4 = parser4.parseQuery(q18, null);

        System.out.println("Query is " + pq1.getTupleExpr());
        System.out.println("Indexes are " + pq3.getTupleExpr() + " , " + pq4.getTupleExpr());

        // Wrap each index in a Projection so it can stand in as a tuple set.
        SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet(new Projection(pq3.getTupleExpr()));
        SimpleExternalTupleSet extTup3 = new SimpleExternalTupleSet(new Projection(pq4.getTupleExpr()));

        List<ExternalTupleSet> list = new ArrayList<ExternalTupleSet>();
        list.add(extTup3);
        list.add(extTup2);

        ExternalProcessor processor = new ExternalProcessor(list);

        TupleExpr tup = processor.process(pq1.getTupleExpr());

        System.out.println("Processed query is " + tup);

        // Statement patterns of the original (unprocessed) query.
        Set<StatementPattern> qSet = Sets.newHashSet(StatementPatternCollector.process(pq1.getTupleExpr()));

        // Collect the ExternalTupleSet nodes the processor inserted.
        ExternalTupleVstor eTup = new ExternalTupleVstor();
        tup.visit(eTup);
        Set<QueryModelNode> eTupSet = eTup.getExtTup();

        Assert.assertEquals(2, eTupSet.size());

        Set<StatementPattern> set = Sets.newHashSet();

        for (QueryModelNode s : eTupSet) {
            Set<StatementPattern> tempSet = Sets.newHashSet(StatementPatternCollector.process(((ExternalTupleSet) s)
                    .getTupleExpr()));
            set.addAll(tempSet);

        }

        // Every pattern substituted by an index must exist in the query.
        Assert.assertTrue(qSet.containsAll(set));
    }

    /**
     * Processes the constant-argument geospatial query q21 against the
     * all-variable geospatial index q23 and verifies that exactly one
     * ExternalTupleSet node is substituted.
     */
    @Test
    public void testGeoIndexFunction() throws Exception {

        SPARQLParser parser1 = new SPARQLParser();
        SPARQLParser parser2 = new SPARQLParser();

        ParsedQuery pq1 = parser1.parseQuery(q21, null);
        ParsedQuery pq2 = parser2.parseQuery(q23, null);

        System.out.println("Query is " + pq1.getTupleExpr());
        System.out.println("Index is " + pq2.getTupleExpr());

        SimpleExternalTupleSet extTup = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr()));

        List<ExternalTupleSet> list = new ArrayList<ExternalTupleSet>();
        list.add(extTup);

        ExternalProcessor processor = new ExternalProcessor(list);

        TupleExpr tup = processor.process(pq1.getTupleExpr());

        System.out.println("Processed query is " + tup);

        Set<StatementPattern> qSet = Sets.newHashSet(StatementPatternCollector.process(pq1.getTupleExpr()));

        ExternalTupleVstor eTup = new ExternalTupleVstor();
        tup.visit(eTup);
        Set<QueryModelNode> eTupSet = eTup.getExtTup();

        Set<StatementPattern> set = Sets.newHashSet();

        Assert.assertEquals(1, eTupSet.size());

        for (QueryModelNode s : eTupSet) {
            Set<StatementPattern> tempSet = Sets.newHashSet(StatementPatternCollector.process(((ExternalTupleSet) s)
                    .getTupleExpr()));
            set.addAll(tempSet);

        }

        Assert.assertTrue(qSet.containsAll(set));

    }

    /**
     * Processes the constant-argument free-text query q22 against the
     * all-variable free-text index q24; the index matches twice (once for
     * "sally"/"bob", once for "john"/"harry"), so two ExternalTupleSet
     * nodes are expected.
     */
    @Test
    public void testFreeTestIndexFunction() throws Exception {

        SPARQLParser parser1 = new SPARQLParser();
        SPARQLParser parser2 = new SPARQLParser();

        ParsedQuery pq1 = parser1.parseQuery(q22, null);
        ParsedQuery pq2 = parser2.parseQuery(q24, null);

        System.out.println("Query is " + pq1.getTupleExpr());
        System.out.println("Index is " + pq2.getTupleExpr());

        SimpleExternalTupleSet extTup = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr()));

        List<ExternalTupleSet> list = new ArrayList<ExternalTupleSet>();
        list.add(extTup);

        ExternalProcessor processor = new ExternalProcessor(list);

        TupleExpr tup = processor.process(pq1.getTupleExpr());

        System.out.println("Processed query is " + tup);

        Set<StatementPattern> qSet = Sets.newHashSet(StatementPatternCollector.process(pq1.getTupleExpr()));

        ExternalTupleVstor eTup = new ExternalTupleVstor();
        tup.visit(eTup);
        Set<QueryModelNode> eTupSet = eTup.getExtTup();

        Set<StatementPattern> set = Sets.newHashSet();

        Assert.assertEquals(2, eTupSet.size());

        for (QueryModelNode s : eTupSet) {
            Set<StatementPattern> tempSet = Sets.newHashSet(StatementPatternCollector.process(((ExternalTupleSet) s)
                    .getTupleExpr()));
            set.addAll(tempSet);

        }

        Assert.assertTrue(qSet.containsAll(set));

    }

    /**
     * Processes query q25 against indexes q24 (free-text) and q26
     * (comparison + free-text) and verifies that two ExternalTupleSet
     * nodes are substituted, all of whose patterns come from the query.
     */
    @Test
    public void testThreeIndexGeoFreeCompareFilterMix() throws Exception {

        SPARQLParser parser1 = new SPARQLParser();
        SPARQLParser parser2 = new SPARQLParser();
        SPARQLParser parser3 = new SPARQLParser();

        ParsedQuery pq1 = parser1.parseQuery(q25, null);
        ParsedQuery pq2 = parser2.parseQuery(q24, null);
        ParsedQuery pq3 = parser3.parseQuery(q26, null);

        System.out.println("Query is " + pq1.getTupleExpr());
        System.out.println("Indexes are " + pq2.getTupleExpr() + " and " + pq3.getTupleExpr());

        SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr()));
        SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet(new Projection(pq3.getTupleExpr()));

        List<ExternalTupleSet> list = new ArrayList<ExternalTupleSet>();
        list.add(extTup1);
        list.add(extTup2);

        ExternalProcessor processor = new ExternalProcessor(list);

        TupleExpr tup = processor.process(pq1.getTupleExpr());

        System.out.println("Processed query is " + tup);

        Set<StatementPattern> qSet = Sets.newHashSet(StatementPatternCollector.process(pq1.getTupleExpr()));

        ExternalTupleVstor eTup = new ExternalTupleVstor();
        tup.visit(eTup);
        Set<QueryModelNode> eTupSet = eTup.getExtTup();
        Set<StatementPattern> set = Sets.newHashSet();

        Assert.assertEquals(2, eTupSet.size());

        for (QueryModelNode s : eTupSet) {
            Set<StatementPattern> tempSet = Sets.newHashSet(StatementPatternCollector.process(((ExternalTupleSet) s)
                    .getTupleExpr()));
            set.addAll(tempSet);

        }

        Assert.assertTrue(qSet.containsAll(set));

    }

    /**
     * Processes the combined free-text + geospatial query q27 against
     * indexes q23, q26 and q24 and verifies that three ExternalTupleSet
     * nodes are substituted, all of whose patterns come from the query.
     */
    @Test
    public void testFourIndexGeoFreeCompareFilterMix() throws Exception {

        SPARQLParser parser1 = new SPARQLParser();
        SPARQLParser parser2 = new SPARQLParser();
        SPARQLParser parser3 = new SPARQLParser();
        SPARQLParser parser4 = new SPARQLParser();

        ParsedQuery pq1 = parser1.parseQuery(q27, null);
        ParsedQuery pq2 = parser2.parseQuery(q23, null);
        ParsedQuery pq3 = parser3.parseQuery(q26, null);
        ParsedQuery pq4 = parser4.parseQuery(q24, null);

        System.out.println("Query is " + pq1.getTupleExpr());
        System.out.println("Indexes are " + pq2.getTupleExpr() + " , " + pq3.getTupleExpr() + " , " + pq4.getTupleExpr());

        SimpleExternalTupleSet extTup1 = new SimpleExternalTupleSet(new Projection(pq2.getTupleExpr()));
        SimpleExternalTupleSet extTup2 = new SimpleExternalTupleSet(new Projection(pq3.getTupleExpr()));
        SimpleExternalTupleSet extTup3 = new SimpleExternalTupleSet(new Projection(pq4.getTupleExpr()));

        List<ExternalTupleSet> list = new ArrayList<ExternalTupleSet>();

        list.add(extTup1);
        list.add(extTup2);
        list.add(extTup3);

        ExternalProcessor processor = new ExternalProcessor(list);

        TupleExpr tup = processor.process(pq1.getTupleExpr());

        System.out.println("Processed query is " + tup);

        Set<StatementPattern> qSet = Sets.newHashSet(StatementPatternCollector.process(pq1.getTupleExpr()));

        ExternalTupleVstor eTup = new ExternalTupleVstor();
        tup.visit(eTup);
        Set<QueryModelNode> eTupSet = eTup.getExtTup();
        Set<StatementPattern> set = Sets.newHashSet();

        Assert.assertEquals(3, eTupSet.size());

        for (QueryModelNode s : eTupSet) {
            Set<StatementPattern> tempSet = Sets.newHashSet(StatementPatternCollector.process(((ExternalTupleSet) s)
                    .getTupleExpr()));
            set.addAll(tempSet);

        }

        Assert.assertTrue(qSet.containsAll(set));

    }

}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/92ddfa59/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/VarConstQueryVariableNormalizerTest.java
----------------------------------------------------------------------
diff --git 
a/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/VarConstQueryVariableNormalizerTest.java
 
b/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/VarConstQueryVariableNormalizerTest.java
new file mode 100644
index 0000000..07cdb6b
--- /dev/null
+++ 
b/extras/indexing/src/test/java/mvm/rya/indexing/external/tupleSet/VarConstQueryVariableNormalizerTest.java
@@ -0,0 +1,727 @@
+package mvm.rya.indexing.external.tupleSet;
+
+import java.util.List;
+import java.util.Set;
+
+import mvm.rya.indexing.external.QueryVariableNormalizer;
+
+import org.junit.Assert;
+import org.junit.Test;
+import org.openrdf.query.algebra.Filter;
+import org.openrdf.query.algebra.QueryModelNode;
+import org.openrdf.query.algebra.StatementPattern;
+import org.openrdf.query.algebra.TupleExpr;
+import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
+import org.openrdf.query.algebra.helpers.StatementPatternCollector;
+import org.openrdf.query.parser.ParsedQuery;
+import org.openrdf.query.parser.sparql.SPARQLParser;
+
+import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
+
+public class VarConstQueryVariableNormalizerTest {
+
    // ------------------------------------------------------------------
    // SPARQL fixture strings: "queries" (with constants) paired against
    // "indexes" (with variables) by the tests below.
    // NOTE(review): this block was reconstructed from a line-wrapped diff;
    // string literals were rejoined exactly as wrapped in the patch.
    // ------------------------------------------------------------------

    // Query with constant subject/object terms; paired with index1.
    private String query1 = " " //
            + "SELECT ?person ?address ?otherValue" //
            + "{"  //
            + "?person a <uri:Person>. " //
            + "?person <uri:hasName> <uri:name>."//
            + "?person <uri:hasAddress> ?address." //
            + "?person <uri:blah> ?otherValue" //
            + "}"; //

    // All-variable index matching query1's shape.
    private String index1 = " " //
            + "SELECT ?X ?Y ?Z ?W" //
            + "{"//
            + "?X a <uri:Person>.  " //
            + "?X <uri:hasName> ?Y."//
            + "?X <uri:hasAddress> ?Z." //
            + "?X <uri:blah> ?W" //
            + "}"; //

    // Two-pattern index; matches q7 twice in queryConstNodeTwoMatch.
    private String q4 = ""//
            + "SELECT ?s ?t ?u " //
            + "{" //
            + "  ?s a ?t . "//
            + "  ?t <http://www.w3.org/2000/01/rdf-schema#label> ?u "//
            + "}";//

    // Four-pattern query containing two copies of q4's shape.
    private String q7 = ""//
            + "SELECT ?s ?t ?u ?x ?y ?z " //
            + "{" //
            + "  ?s a ?t ."//
            + "  ?x a ?y ."//
            + "  ?t <http://www.w3.org/2000/01/rdf-schema#label> ?u ."//
            + "  ?y <http://www.w3.org/2000/01/rdf-schema#label> ?z ."//
            + "}";//

    // Large all-variable query.
    // NOTE(review): not referenced by any test visible in this chunk.
    private String q8 = ""//
            + "SELECT ?f ?m ?d ?e ?l ?c ?n ?o ?p ?a ?h ?r " //
            + "{" //
            + "  ?f a ?m ."//
            + "  ?e a ?l ."//
            + "  ?n a ?o ."//
            + "  ?a a ?h ."//
            + "  ?m <http://www.w3.org/2000/01/rdf-schema#label> ?d ."//
            + "  ?l <http://www.w3.org/2000/01/rdf-schema#label> ?c ."//
            + "  ?o <http://www.w3.org/2000/01/rdf-schema#label> ?p ."//
            + "  ?h <http://www.w3.org/2000/01/rdf-schema#label> ?r ."//
            + "  ?f <uri:talksTo> ?m . "//
            + "  ?m <uri:talksTo> ?a . "//
            + "  ?o <uri:talksTo> ?r . "//
            + "}";//

    // Large query with four animal constants; each triple-group can match
    // the small index q10, yielding four normalizations.
    private String q9 = ""//
            + "SELECT ?f  ?d ?e ?c ?n ?p ?a ?r " //
            + "{" //
            + "  ?f a <uri:dog> ."//
            + "  ?e a <uri:chicken> ."//
            + "  ?n a <uri:cow> ."//
            + "  ?a a <uri:elephant> ."//
            + "  <uri:dog> <http://www.w3.org/2000/01/rdf-schema#label> ?d ."//
            + "  <uri:chicken> <http://www.w3.org/2000/01/rdf-schema#label> ?c ."//
            + "  <uri:cow> <http://www.w3.org/2000/01/rdf-schema#label> ?p ."//
            + "  <uri:elephant> <http://www.w3.org/2000/01/rdf-schema#label> ?r ."//
            + "  ?d <uri:talksTo> ?f . "//
            + "  ?c <uri:talksTo> ?e . "//
            + "  ?p <uri:talksTo> ?n . "//
            + "  ?r <uri:talksTo> ?a . "//
            + "}";//

    // Small all-variable index matching each animal group of q9.
    private String q10 = ""//
            + "SELECT ?f ?m ?d " //
            + "{" //
            + "  ?f a ?m ."//
            + "  ?m <http://www.w3.org/2000/01/rdf-schema#label> ?d ."//
            + "  ?d <uri:talksTo> ?f . "//
            + "}";//

    // Fully generic two-pattern query (no constants at all).
    String q15 = ""//
            + "SELECT ?x ?y ?z ?w " //
            + "{" //
            + "  ?x ?y ?z ."//
            + "  ?y ?z ?w ."//
            + "}";//

    // Fully generic single-pattern index.
    String q16 = ""//
            + "SELECT ?a ?b ?c " //
            + "{" //
            + "  ?a ?b ?c ."//
            + "}";//

    // Single pattern whose object is the constant "url:" -- cannot match q16.
    String q17 = ""//
            + "SELECT ?q ?r " //
            + "{" //
            + "  ?q ?r \"url:\" ."//
            + "}";//

    // Variant of q9's shape with a single elephant constant; matches q9 six
    // ways in queryConstNodeSixMatch.
    private String q18 = ""//
            + "SELECT ?f ?m ?d ?e ?l ?c ?n ?o ?p ?a ?r " //
            + "{" //
            + "  ?f a ?m ."//
            + "  ?e a ?l ."//
            + "  ?n a ?o ."//
            + "  ?a a <uri:elephant> ."//
            + "  ?m <http://www.w3.org/2000/01/rdf-schema#label> ?d ."//
            + "  ?l <http://www.w3.org/2000/01/rdf-schema#label> ?c ."//
            + "  ?o <http://www.w3.org/2000/01/rdf-schema#label> ?p ."//
            + "  <uri:elephant> <http://www.w3.org/2000/01/rdf-schema#label> ?r ."//
            + "  ?d <uri:talksTo> ?f . "//
            + "  ?c <uri:talksTo> ?e . "//
            + "  ?p <uri:talksTo> ?n . "//
            + "  ?r <uri:talksTo> ?a . "//
            + "}";//

    // Geospatial query with constant WKT filter arguments; paired with q34.
    String q32 = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
            + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
            + "SELECT ?feature ?point " //
            + "{" //
            + "  ?feature a geo:Feature . "//
            + "  ?feature geo:hasGeometry ?point . "//
            + "  ?point a geo:Point . "//
            + "  ?point geo:asWKT \"wkt\" . "//
            + "  FILTER(geof:sfWithin(\"wkt\", \"Polygon\")) " //
            + "}";//

    // Free-text query with two fts:text filters; paired with q35 (matches twice).
    String q33 = "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
            + "SELECT ?person ?commentmatch ?labelmatch" //
            + "{" //
            + "  ?person a <http://example.org/ontology/Person> . "//
            + "  ?person <http://www.w3.org/2000/01/rdf-schema#comment> ?labelmatch . "//
            + "  ?person <http://www.w3.org/2000/01/rdf-schema#comment> ?commentmatch . "//
            + "  FILTER(fts:text(?labelmatch, \"sally\")) . " //
            + "  FILTER(fts:text(?commentmatch, \"bob\"))  " //
            + "}";//

    // All-variable geospatial index matching q32's constants.
    String q34 = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
            + "PREFIX geof: <http://www.opengis.net/def/function/geosparql/>  "//
            + "SELECT ?a ?b ?c ?d" //
            + "{" //
            + "  ?a a geo:Feature . "//
            + "  ?b a geo:Point . "//
            + "  ?b geo:asWKT ?c . "//
            + "  FILTER(geof:sfWithin(?c, ?d)) " //
            + "}";//

    // All-variable free-text index matching q33's constants.
    String q35 = "PREFIX fts: <http://rdf.useekm.com/fts#>  "//
            + "SELECT ?a ?b ?c" //
            + "{" //
            + "  ?a <http://www.w3.org/2000/01/rdf-schema#comment> ?b . "//
            + "  FILTER(fts:text(?b, ?c))  " //
            + "}";//
+     
+
+    
+    
+    
+    
+    
+    
+    /**
+     * @param tuple1
+     * @param tuple2
+     * @return
+     * @throws Exception
+     */
+    public boolean tupleEquals(TupleExpr tuple1, TupleExpr tuple2) throws 
Exception {
+        
+        Set<StatementPattern> spSet1 = 
Sets.newHashSet(StatementPatternCollector.process(tuple1));
+        Set<StatementPattern> spSet2 = 
Sets.newHashSet(StatementPatternCollector.process(tuple2));
+       
+        return spSet1.equals(spSet2);
+
+    }
+
+    /**
+     * @param tuple1
+     * @param tuple2
+     * @return
+     * @throws Exception
+     */
+    public boolean isTupleSubset(TupleExpr tuple1, TupleExpr tuple2) throws 
Exception {
+
+        Set<StatementPattern> spSet1 = 
Sets.newHashSet(StatementPatternCollector.process(tuple1));
+        Set<StatementPattern> spSet2 = 
Sets.newHashSet(StatementPatternCollector.process(tuple2));
+
+        return (Sets.intersection(spSet1, spSet2).equals(spSet2));
+
+    }
+    
+    
+
+
+    /**
+     * @throws Exception
+     *             Tests QueryVariableNormalizerContext with two queries whose
+     *             StatementPattern nodes contain no constant Vars.
+     */
+    @Test
+    public void testNoConstants() throws Exception {
+
+        SPARQLParser parser1 = new SPARQLParser();
+        SPARQLParser parser2 = new SPARQLParser();
+
+        ParsedQuery pq1 = parser1.parseQuery(q15, null);
+        ParsedQuery pq2 = parser2.parseQuery(q16, null);
+
+        List<TupleExpr> normalize = 
QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(),
+                pq2.getTupleExpr());
+
+        Assert.assertEquals(2,normalize.size());
+        for (TupleExpr s : normalize) {
+            Assert.assertTrue(isTupleSubset(pq1.getTupleExpr(), s));
+        }
+
+        pq1 = parser1.parseQuery(q16, null);
+        pq2 = parser2.parseQuery(q17, null);
+        normalize = 
QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(), 
pq2.getTupleExpr());
+
+        Assert.assertTrue(normalize.size() == 0);
+
+    }
+    
+
+    
+        
+    
+    @Test
+    public void queryConstantNodeOneMatch() throws Exception {
+
+        SPARQLParser p = new SPARQLParser();
+
+        ParsedQuery pq1 = p.parseQuery(query1, null);
+        ParsedQuery pq2 = p.parseQuery(index1, null);
+        
+
+        List<TupleExpr> normalize = 
QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(),
+                pq2.getTupleExpr());
+               
+        Assert.assertEquals(1, normalize.size());
+        
+        for(TupleExpr te: normalize) {
+            Assert.assertTrue(isTupleSubset(pq1.getTupleExpr(), te));
+        }        
+    }
+    
+    
+    
+    /**
+     * @throws Exception
+     *             Tests QueryVariableNormalizerContext on the large query q9
+     *             with with a smaller, potential index q10 to see if the
+     *             correct number of outputs are produced.
+     */
+    @Test
+    public void querConstNodeFourMatch() throws Exception {
+
+        SPARQLParser parser1 = new SPARQLParser();
+        SPARQLParser parser2 = new SPARQLParser();
+
+        ParsedQuery pq1 = parser1.parseQuery(q9, null);
+        ParsedQuery pq2 = parser2.parseQuery(q10, null);
+
+        List<TupleExpr> normalize = 
QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(),
+                pq2.getTupleExpr());
+
+        
+        //System.out.println(normalize);
+        
+        Assert.assertEquals(4, normalize.size());
+        
+        for(TupleExpr te: normalize) {
+            Assert.assertTrue(isTupleSubset(pq1.getTupleExpr(), te));
+        }
+        
+
+        
+
+    }
+    
+    
+    @Test
+    public void queryConstNodeSixMatch() throws Exception {
+
+        SPARQLParser parser1 = new SPARQLParser();
+        SPARQLParser parser2 = new SPARQLParser();
+
+        ParsedQuery pq1 = parser1.parseQuery(q9, null);
+        ParsedQuery pq2 = parser2.parseQuery(q18, null);
+
+        List<TupleExpr> normalize = 
QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(),
+                pq2.getTupleExpr());
+
+        
+        Assert.assertEquals(6, normalize.size());
+        
+        //System.out.println("tuple expr is " +pq1.getTupleExpr() + " and 
normalized tuples are " + normalize);
+        
+        for(TupleExpr te: normalize) {
+            Assert.assertTrue(isTupleSubset(pq1.getTupleExpr(), te));
+        }
+        
+
+    }
+    
+    
+    
+    @Test
+    public void queryConstGeoFilter() throws Exception {
+
+        SPARQLParser parser1 = new SPARQLParser();
+        SPARQLParser parser2 = new SPARQLParser();
+
+        ParsedQuery pq1 = parser1.parseQuery(q32, null);
+        ParsedQuery pq2 = parser2.parseQuery(q34, null);
+
+        
+        List<TupleExpr> normalize = 
QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(),
+                pq2.getTupleExpr());
+
+        
+    
+        Assert.assertEquals(1, normalize.size());
+        
+        for(TupleExpr te: normalize) {
+            Assert.assertTrue(isTupleSubset(pq1.getTupleExpr(), te));
+        }
+        
+        
+        
+        FilterCollector fc1 = new FilterCollector();
+        pq1.getTupleExpr().visit(fc1);
+        List<QueryModelNode> fList1 = fc1.getFilters();
+        
+        for(TupleExpr te: normalize) {
+            FilterCollector fc2 = new FilterCollector();
+            te.visit(fc2);
+            List<QueryModelNode> fList2 = fc2.getFilters();
+            
+            for(QueryModelNode q: fList2) {
+                Assert.assertTrue(fList1.contains(q));
+            }
+        }
+        
+
+    }
+    
+    
+    @Test
+    public void queryConstFreeTextFilter() throws Exception {
+
+        SPARQLParser parser1 = new SPARQLParser();
+        SPARQLParser parser2 = new SPARQLParser();
+
+        ParsedQuery pq1 = parser1.parseQuery(q33, null);
+        ParsedQuery pq2 = parser2.parseQuery(q35, null);
+        
+        System.out.println(pq1.getTupleExpr());
+        
+        List<TupleExpr> normalize = 
QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(),
+                pq2.getTupleExpr());
+        
+        
+        
+        Assert.assertEquals(2, normalize.size());
+        
+        for(TupleExpr te: normalize) {
+            Assert.assertTrue(isTupleSubset(pq1.getTupleExpr(), te));
+        }
+        
+        
+        
+        FilterCollector fc1 = new FilterCollector();
+        pq1.getTupleExpr().visit(fc1);
+        List<QueryModelNode> fList1 = fc1.getFilters();
+        
+        for(TupleExpr te: normalize) {
+            FilterCollector fc2 = new FilterCollector();
+            te.visit(fc2);
+            List<QueryModelNode> fList2 = fc2.getFilters();
+            
+            for(QueryModelNode q: fList2) {
+                Assert.assertTrue(fList1.contains(q));
+            }
+        }
+        
+        
+        
+
+    }
+    
+    
+    
+    
+    
+    // Normalizes index query q4 against main query q7 (both declared earlier
+    // in this file; presumably the index pattern occurs twice in the query —
+    // confirm against the q4/q7 declarations). Expects two normalized
+    // matches, each a tuple subset of the main query. No filter check here
+    // because this case exercises constant-node matching only.
+    @Test
+    public void queryConstNodeTwoMatch() throws Exception {
+
+        SPARQLParser parser1 = new SPARQLParser();
+        SPARQLParser parser2 = new SPARQLParser();
+
+        ParsedQuery pq1 = parser1.parseQuery(q7, null);
+        ParsedQuery pq2 = parser2.parseQuery(q4, null);
+
+        List<TupleExpr> normalize = 
QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(),
+                pq2.getTupleExpr());
+
+        
+        Assert.assertEquals(2, normalize.size());
+        
+        for(TupleExpr te: normalize) {
+            Assert.assertTrue(isTupleSubset(pq1.getTupleExpr(), te));
+        }
+        
+
+
+
+
+    }
+    
+   
+    
+    
+    
+    
+    
+    
+    // Exercises normalization of n-ary list operators: the index query q2
+    // uses IN / NOT IN lists containing variables (?y, ?r) that must be bound
+    // to the constants appearing in the corresponding lists of the main
+    // query q1 (1,2,3 and 5,6,7). Exactly one match is expected, and its
+    // filters must all come from the main query.
+    @Test
+    public void queryNAryListMatch() throws Exception {
+
+        
+        
+        // Main query: three filters, including one with constant IN lists.
+        String q1 = ""//
+                + "SELECT ?a ?b ?c ?d ?e ?f ?q ?g ?h " //
+                + "{" //
+                + " GRAPH ?x { " //
+                + "  ?a a ?b ."//
+                + "  ?b <http://www.w3.org/2000/01/rdf-schema#label> ?c ."//
+                + "  ?d <uri:talksTo> ?e . "//
+                + "  FILTER(bound(?f) && sameTerm(?a,?b)&&bound(?q)). " //
+                + "  FILTER ( ?e < ?f && (?a > ?b || ?c = ?d) ). " //
+                + "  FILTER(?g IN (1,2,3) && ?h NOT IN(5,6,7)). " //
+                + "  ?x <http://www.w3.org/2000/01/rdf-schema#label> ?g. "//
+                + "  ?b a ?q ."//
+                + "     }"//
+                + "}";//
+        
+        
+        // Index query: same shape, but the IN lists contain variables that
+        // the normalizer must map onto q1's constants.
+        String q2 = ""//
+                + "SELECT ?m ?n ?r ?y " //
+                + "{" //
+                + " GRAPH ?q { " //
+                + "  FILTER(?m IN (1,?y,3) && ?n NOT IN(?r,6,7)). " //
+                + "  ?q <http://www.w3.org/2000/01/rdf-schema#label> ?m. "//
+                + "     }"//
+                + "}";//
+        
+        
+        SPARQLParser parser1 = new SPARQLParser();
+        SPARQLParser parser2 = new SPARQLParser();
+
+        ParsedQuery pq1 = parser1.parseQuery(q1, null);
+        ParsedQuery pq2 = parser2.parseQuery(q2, null);
+
+        List<TupleExpr> normalize = 
QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(),
+                pq2.getTupleExpr());
+
+        
+        Assert.assertEquals(1, normalize.size());
+        
+        for(TupleExpr te: normalize) {
+            Assert.assertTrue(isTupleSubset(pq1.getTupleExpr(), te));
+        }
+        
+        // Every filter condition in the normalized match must also occur in
+        // the original query's filter list.
+        FilterCollector fc1 = new FilterCollector();
+        pq1.getTupleExpr().visit(fc1);
+        List<QueryModelNode> fList1 = fc1.getFilters();
+        
+        for(TupleExpr te: normalize) {
+            FilterCollector fc2 = new FilterCollector();
+            te.visit(fc2);
+            List<QueryModelNode> fList2 = fc2.getFilters();
+            
+            for(QueryModelNode q: fList2) {
+                Assert.assertTrue(fList1.contains(q));
+            }
+        }
+
+
+
+    }
+    
+    
+   
+    
+    
+    
+    // Exercises matching of a compound filter: index query q17's filter
+    // variables (?k, ?l, ...) must be normalized against q19, where the
+    // corresponding positions hold a constant ("5"). Exactly one match is
+    // expected, a tuple subset of q19, with all filters drawn from q19.
+    // NOTE(review): q18 and q20 below are dead commented-out fixtures —
+    // consider deleting them.
+    @Test
+    public void queryCompoundFilterMatch() throws Exception {
+
+        
+        
+        // Index query: single statement pattern plus one compound filter.
+        String q17 = ""//
+                + "SELECT ?j ?k ?l ?m ?n ?o " //
+                + "{" //
+                + " GRAPH ?z { " //
+                + "  ?j <uri:talksTo> ?k . "//
+                + "  FILTER ( ?k < ?l && (?m > ?n || ?o = ?j) ). " //
+                + "     }"//
+                + "}";//
+        
+//        String q18 = ""//
+//                + "SELECT ?r ?s ?t ?u " //
+//                + "{" //
+//                + " GRAPH ?q { " //
+//                + "  FILTER(bound(?r) && sameTerm(?s,?t)&&bound(?u)). " //
+//                + "  ?t a ?u ."//
+//                + "     }"//
+//                + "}";//
+        
+        
+        
+        // Main query: the index's filter/object positions appear here as the
+        // constant "5".
+        String q19 = ""//
+                + "SELECT ?a ?b ?c ?d ?f ?q ?g ?h " //
+                + "{" //
+                + " GRAPH ?x { " //
+                + "  ?a a ?b ."//
+                + "  ?b <http://www.w3.org/2000/01/rdf-schema#label> ?c ."//
+                + "  ?d <uri:talksTo> \"5\" . "//
+                + "  FILTER ( \"5\" < ?f && (?a > ?b || ?c = ?d) ). " //
+                + "  FILTER(bound(?f) && sameTerm(?a,?b)&&bound(?q)). " //
+                + "  FILTER(?g IN (1,2,3) && ?h NOT IN(5,6,7)). " //
+                + "  ?h <http://www.w3.org/2000/01/rdf-schema#label> ?g. "//
+                + "  ?b a ?q ."//
+                + "     }"//
+                + "}";//
+        
+        
+//        String q20 = ""//
+//                + "SELECT ?m ?n ?o " //
+//                + "{" //
+//                + " GRAPH ?q { " //
+//                + "  FILTER(?m IN (1,?o,3) && ?n NOT IN(5,6,7)). " //
+//                + "  ?n <http://www.w3.org/2000/01/rdf-schema#label> ?m. "//
+//                + "     }"//
+//                + "}";//
+        
+        
+        
+        
+        SPARQLParser parser1 = new SPARQLParser();
+        SPARQLParser parser2 = new SPARQLParser();
+
+        ParsedQuery pq1 = parser1.parseQuery(q19, null);
+        ParsedQuery pq2 = parser2.parseQuery(q17, null);
+
+        List<TupleExpr> normalize = 
QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(),
+                pq2.getTupleExpr());
+
+        
+        
+        System.out.println(normalize);
+        
+        Assert.assertEquals(1, normalize.size());
+        
+        for(TupleExpr te: normalize) {
+            Assert.assertTrue(isTupleSubset(pq1.getTupleExpr(), te));
+        }
+        
+        // All filters of the normalized match must come from the main query.
+        FilterCollector fc1 = new FilterCollector();
+        pq1.getTupleExpr().visit(fc1);
+        List<QueryModelNode> fList1 = fc1.getFilters();
+        
+        for(TupleExpr te: normalize) {
+            FilterCollector fc2 = new FilterCollector();
+            te.visit(fc2);
+            List<QueryModelNode> fList2 = fc2.getFilters();
+            
+            for(QueryModelNode q: fList2) {
+                Assert.assertTrue(fList1.contains(q));
+            }
+        }
+
+
+
+    }
+    
+    
+    
+    
+    
+//    @Test
+//    public void queryCompoundFilterMatch2() throws Exception {
+//
+//        
+//        
+//     
+//        
+//        
+//        String q19 = ""//
+//                + "SELECT ?a ?b ?c ?d ?f ?q ?g ?h " //
+//                + "{" //
+//                + " GRAPH ?x { " //
+//                + "  ?a a ?b ."//
+//                + "  ?b <http://www.w3.org/2000/01/rdf-schema#label> ?c ."//
+//                + "  ?d <uri:talksTo> \"5\" . "//
+//                + "  FILTER ( \"5\" < ?f && (?a > ?b || ?c = ?d) ). " //
+//                + "  FILTER(bound(?f) && sameTerm(?a,?b)&&bound(?q)). " //
+//                + "  FILTER(?g IN (1,5,3) && ?h NOT IN(5,6,7)). " //
+//                + "  ?h <http://www.w3.org/2000/01/rdf-schema#label> ?g. "//
+//                + "  ?b a ?q ."//
+//                + "     }"//
+//                + "}";//
+//        
+//        
+//        String q20 = ""//
+//                + "SELECT ?m ?n ?o ?f ?a ?b ?c ?d " //
+//                + "{" //
+//                + " GRAPH ?q { " //
+//                + "  ?d <uri:talksTo> ?o . "//
+//                + "  FILTER ( ?o < ?f && (?a > ?b || ?c = ?d) ). " //
+//                + "  FILTER(?m IN (1,?o,3) && ?n NOT IN(5,6,7)). " //
+//                + "  ?n <http://www.w3.org/2000/01/rdf-schema#label> ?m. "//
+//                + "     }"//
+//                + "}";//
+//        
+//        
+//        
+//        
+//        SPARQLParser parser1 = new SPARQLParser();
+//        SPARQLParser parser2 = new SPARQLParser();
+//
+//        ParsedQuery pq1 = parser1.parseQuery(q19, null);
+//        ParsedQuery pq2 = parser2.parseQuery(q20, null);
+//
+//        List<TupleExpr> normalize = 
QueryVariableNormalizer.getNormalizedIndex(pq1.getTupleExpr(),
+//                pq2.getTupleExpr());
+//
+//        
+//        
+//        System.out.println(normalize);
+//        
+//        Assert.assertEquals(1, normalize.size());
+//        
+//        for(TupleExpr te: normalize) {
+//            Assert.assertTrue(isTupleSubset(pq1.getTupleExpr(), te));
+//        }
+//        
+//        FilterCollector fc1 = new FilterCollector();
+//        pq1.getTupleExpr().visit(fc1);
+//        List<QueryModelNode> fList1 = fc1.getFilters();
+//        
+//        for(TupleExpr te: normalize) {
+//            FilterCollector fc2 = new FilterCollector();
+//            te.visit(fc2);
+//            List<QueryModelNode> fList2 = fc2.getFilters();
+//            
+//            for(QueryModelNode q: fList2) {
+//                Assert.assertTrue(fList1.contains(q));
+//            }
+//        }
+//
+//
+//
+//    }
+//    
+//    
+    
+    
+    
+    
+    
+    
+    
+    
+    
+    
+    
+    // Query-model visitor that gathers the condition expression of every
+    // Filter node it encounters, in visit order.
+    // NOTE(review): Lists, Filter and QueryModelVisitorBase do not appear in
+    // this file's visible import block — confirm the imports compile.
+    private static class FilterCollector extends 
QueryModelVisitorBase<RuntimeException> {
+
+        private List<QueryModelNode> filterList = Lists.newArrayList();
+
+        // Returns the filter conditions collected so far (live list, not a
+        // copy).
+        public List<QueryModelNode> getFilters() {
+            return filterList;
+        }
+
+        @Override
+        public void meet(Filter node) {
+            // Record the boolean condition, then keep descending the tree.
+            filterList.add(node.getCondition());
+            super.meet(node);
+        }
+
+    }
+
+    
+    
+    
+
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/92ddfa59/extras/indexingSailExample/pom.xml
----------------------------------------------------------------------
diff --git a/extras/indexingSailExample/pom.xml 
b/extras/indexingSailExample/pom.xml
new file mode 100644
index 0000000..130ab44
--- /dev/null
+++ b/extras/indexingSailExample/pom.xml
@@ -0,0 +1,80 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0"; 
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"; 
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/xsd/maven-4.0.0.xsd";>
+    <!-- Example module demonstrating the Rya indexing sail; builds a
+         distribution zip via the assembly plugin below. -->
+    <parent>
+        <groupId>mvm.rya</groupId>
+        <artifactId>rya.extras</artifactId>
+        <version>3.2.9</version>
+    </parent>
+
+    <modelVersion>4.0.0</modelVersion>
+    <name>${project.groupId}.${project.artifactId}</name>
+    <artifactId>rya.indexingSail.example</artifactId>
+
+    <dependencies>
+        <!-- No version: presumably managed by the rya.extras parent —
+             confirm in the parent's dependencyManagement. -->
+           <dependency>
+            <groupId>mvm.rya</groupId>
+            <artifactId>rya.prospector</artifactId>
+        </dependency>
+           
+        
+       <dependency>
+            <groupId>mvm.rya</groupId>
+            <artifactId>mongodb.rya</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+               <dependency>
+            <groupId>mvm.rya</groupId>
+            <artifactId>rya.indexing</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <!-- Classified artifacts consumed by the assembly descriptor:
+             accumulo-server goes to accumulo/lib/ext, map-reduce to the MR
+             output directory. -->
+        <dependency>
+            <groupId>mvm.rya</groupId>
+            <artifactId>rya.indexing</artifactId>
+            <classifier>accumulo-server</classifier>
+            <version>${project.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>mvm.rya</groupId>
+            <artifactId>rya.indexing</artifactId>
+            <classifier>map-reduce</classifier>
+            <version>${project.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.accumulo</groupId>
+            <artifactId>accumulo-core</artifactId>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.thrift</groupId>
+            <artifactId>libthrift</artifactId>
+        </dependency>
+
+        <dependency>
+          <groupId>org.locationtech.geomesa</groupId>
+          <artifactId>geomesa-distributed-runtime</artifactId>
+          <version>${geomesa.version}</version>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <plugins>
+            <!-- Packages the distribution zip described by
+                 src/main/assembly/assembly.xml during the package phase. -->
+            <plugin>
+                <artifactId>maven-assembly-plugin</artifactId>
+                <version>2.4</version>
+                <configuration>
+                    <descriptors>
+                        <descriptor>src/main/assembly/assembly.xml</descriptor>
+                    </descriptors>
+                </configuration>
+                <executions>
+                    <execution>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>single</goal>
+                        </goals>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
+</project>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/92ddfa59/extras/indexingSailExample/src/main/assembly/assembly.xml
----------------------------------------------------------------------
diff --git a/extras/indexingSailExample/src/main/assembly/assembly.xml 
b/extras/indexingSailExample/src/main/assembly/assembly.xml
new file mode 100644
index 0000000..047ea5f
--- /dev/null
+++ b/extras/indexingSailExample/src/main/assembly/assembly.xml
@@ -0,0 +1,50 @@
+<!-- Assembly descriptor: builds a "distribution" zip with three dependency
+     sets (Accumulo server jars, map-reduce jars, everything else under
+     dist/lib) plus the example script and source. -->
+<assembly 
xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"; 
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance";
+    
xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0
 http://maven.apache.org/xsd/assembly-1.1.0.xsd";>
+
+    <id>distribution</id>
+    <formats>
+        <format>zip</format>
+    </formats>
+    <includeBaseDirectory>false</includeBaseDirectory>
+
+    <dependencySets>
+        <!-- Server-side jars deployed into Accumulo's lib/ext directory. -->
+        <dependencySet>
+            <outputDirectory>accumulo/lib/ext</outputDirectory>
+            <includes>
+                <include>mvm.rya:rya.indexing:*:accumulo-server</include>
+                
<include>org.locationtech.geomesa:geomesa-distributed-runtime:*</include>
+            </includes>
+        </dependencySet>
+        <dependencySet>
+            <outputDirectory>map-reduce</outputDirectory>
+            <includes>
+                <include>mvm.rya:rya.indexing:*:map-reduce</include>
+            </includes>
+        </dependencySet>
+        <!-- Client classpath for the example; test scope so test-scoped
+             dependencies are included too. -->
+        <dependencySet>
+            <outputDirectory>dist/lib</outputDirectory>
+            <includes>
+                <include>*</include>
+            </includes>
+            <excludes>
+                <!-- Do not include the example jar. Example batch script 
builds the example -->
+                <exclude>mvm.rya:rya.indexingSail.example</exclude>
+
+                <!-- Do not include the MR or Accumulo Server builds -->
+                <exclude>mvm.rya:rya.indexing:*:accumulo-server</exclude>
+                <exclude>mvm.rya:rya.indexing:*:map-reduce</exclude>
+            </excludes>
+            <scope>test</scope>
+        </dependencySet>
+    </dependencySets>
+    <files>
+        <file>
+            <source>src/main/scripts/RunRyaDirectExample.bat</source>
+            <outputDirectory>dist</outputDirectory>
+        </file>
+        <file>
+            <source>src/main/java/RyaDirectExample.java</source>
+            <outputDirectory>dist</outputDirectory>
+        </file>
+    </files>
+</assembly>

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/92ddfa59/extras/indexingSailExample/src/main/java/EntityDirectExample.java
----------------------------------------------------------------------
diff --git a/extras/indexingSailExample/src/main/java/EntityDirectExample.java 
b/extras/indexingSailExample/src/main/java/EntityDirectExample.java
new file mode 100644
index 0000000..408c754
--- /dev/null
+++ b/extras/indexingSailExample/src/main/java/EntityDirectExample.java
@@ -0,0 +1,292 @@
+
+
+import java.util.List;
+
+import mvm.rya.accumulo.AccumuloRdfConfiguration;
+import mvm.rya.api.RdfCloudTripleStoreConfiguration;
+import mvm.rya.indexing.RyaSailFactory;
+import mvm.rya.indexing.accumulo.ConfigUtils;
+
+import org.apache.accumulo.core.client.AccumuloException;
+import org.apache.accumulo.core.client.AccumuloSecurityException;
+import org.apache.accumulo.core.client.TableNotFoundException;
+import org.apache.commons.lang.Validate;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.log4j.Logger;
+import org.openrdf.query.BindingSet;
+import org.openrdf.query.MalformedQueryException;
+import org.openrdf.query.QueryEvaluationException;
+import org.openrdf.query.QueryLanguage;
+import org.openrdf.query.QueryResultHandlerException;
+import org.openrdf.query.TupleQuery;
+import org.openrdf.query.TupleQueryResultHandler;
+import org.openrdf.query.TupleQueryResultHandlerException;
+import org.openrdf.query.Update;
+import org.openrdf.query.UpdateExecutionException;
+import org.openrdf.repository.RepositoryException;
+import org.openrdf.repository.sail.SailRepository;
+import org.openrdf.repository.sail.SailRepositoryConnection;
+import org.openrdf.sail.Sail;
+
+// Runnable example: stands up a Rya SailRepository backed by a mock Accumulo
+// instance with entity indexing enabled, then runs two SPARQL demos (insert +
+// query) that validate result counts with Validate.isTrue.
+public class EntityDirectExample {
+    private static final Logger log = 
Logger.getLogger(EntityDirectExample.class);
+
+    //
+    // Connection configuration parameters
+    //
+
+    private static final boolean USE_MOCK_INSTANCE = true;
+    private static final boolean PRINT_QUERIES = true;
+    private static final String INSTANCE = "instance";
+    private static final String RYA_TABLE_PREFIX = "x_test_triplestore_";
+    private static final String AUTHS = "U";
+    
+    // Entry point: builds the configuration, opens the repository, runs both
+    // demos, and always shuts the connection/repository down quietly.
+    public static void main(String[] args) throws Exception {
+        Configuration conf = getConf();
+        conf.setBoolean(ConfigUtils.DISPLAY_QUERY_PLAN, PRINT_QUERIES);
+        
+        log.info("Creating the tables as root.");
+        SailRepository repository = null;
+        SailRepositoryConnection conn = null;
+      
+        try {
+            log.info("Connecting to Indexing Sail Repository.");
+            
+            Sail extSail = RyaSailFactory.getInstance(conf);
+            repository = new SailRepository(extSail);
+            repository.initialize();
+            conn = repository.getConnection();
+
+            log.info("Running SPARQL Example: Add and Delete");
+            testAddAndDelete(conn);
+            log.info("Running SAIL/SPARQL Example: Add and Temporal Search");
+            testAddAndTemporalSearchWithPCJ(conn);
+            
+        } finally {
+            log.info("Shutting down");
+            closeQuietly(conn);
+            closeQuietly(repository);
+        }
+    }
+
+    // Shuts the repository down, deliberately swallowing RepositoryException
+    // so cleanup failures never mask the real error.
+    private static void closeQuietly(SailRepository repository) {
+        if (repository != null) {
+            try {
+                repository.shutDown();
+            } catch (RepositoryException e) {
+                // quietly absorb this exception
+            }
+        }
+    }
+
+    // Connection counterpart of closeQuietly(SailRepository) above.
+    private static void closeQuietly(SailRepositoryConnection conn) {
+        if (conn != null) {
+            try {
+                conn.close();
+            } catch (RepositoryException e) {
+                // quietly absorb this exception
+            }
+        }
+    }
+
+    
+
+
+   
+    // Inserts two statements for one subject, then asserts a two-pattern
+    // SELECT finds exactly one binding. The delete half is disabled (see the
+    // TODO below) because AccumuloRyaDAO does not implement delete yet.
+    public static void testAddAndDelete(SailRepositoryConnection conn) throws 
MalformedQueryException,
+            RepositoryException, UpdateExecutionException, 
QueryEvaluationException, TupleQueryResultHandlerException,
+            AccumuloException, AccumuloSecurityException, 
TableNotFoundException {
+
+        // Add data
+        String query = "INSERT DATA\n"//
+                + "{ GRAPH <http://updated/test> {\n"//
+                + "  <http://acme.com/people/Mike> " //
+                + "       <http://acme.com/actions/likes> \"A new book\" ;\n"//
+                + "       <http://acme.com/actions/likes> \"Avocados\" .\n" + 
"} }";
+
+        log.info("Performing Query");
+
+        Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
+        update.execute();
+        
+        query = "select ?x {GRAPH <http://updated/test> {?x 
<http://acme.com/actions/likes> \"A new book\" . "//
+                + " ?x <http://acme.com/actions/likes> \"Avocados\" }}";
+        CountingResultHandler resultHandler = new CountingResultHandler();
+        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, 
query);
+        tupleQuery.evaluate(resultHandler);
+        log.info("Result count : " + resultHandler.getCount());
+
+        // Only Mike likes both things, so exactly one binding is expected.
+        Validate.isTrue(resultHandler.getCount() == 1);
+        resultHandler.resetCount();
+
+        //TODO delete currently not implemented in AccumuloRyaDAO for 
+//        // Delete Data
+//        query = "DELETE DATA\n" //
+//                + "{ GRAPH <http://updated/test> {\n"
+//                + "  <http://acme.com/people/Mike> 
<http://acme.com/actions/likes> \"A new book\" ;\n"
+//                + "   <http://acme.com/actions/likes> \"Avocados\" .\n" + 
"}}";
+//
+//        update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
+//        update.execute();
+//
+//        query = "select ?x {GRAPH <http://updated/test> {?x 
<http://acme.com/actions/likes> \"A new book\" . "//
+//                + " ?x <http://acme.com/actions/likes> \"Avocados\" }}";
+//        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+//        tupleQuery.evaluate(resultHandler);
+//        log.info("Result count : " + resultHandler.getCount());
+//
+//        Validate.isTrue(resultHandler.getCount() == 0);
+    }
+    
+    
+
+    
+    
+    // Inserts property data for two subjects and runs three multi-pattern
+    // queries, asserting both the result count and the binding-set width.
+    // NOTE(review): despite the name, nothing here is temporal — the queries
+    // only join pref:hasProperty* patterns; confirm intent before renaming.
+    private static void 
testAddAndTemporalSearchWithPCJ(SailRepositoryConnection conn) throws Exception 
{
+
+        // create some resources and literals to make statements out of
+
+        String sparqlInsert = "PREFIX pref: <http://www.model/pref#> \n"
+                + "INSERT DATA {\n" //
+                + "<urn:Bob>       a       pref:Person ;\n" //
+                + "     pref:hasProperty1 'property1' ;\n" //  one second
+                + "     pref:hasProperty2 'property2' ;\n" //   2 seconds
+                + "     pref:hasProperty3 'property3' .\n" //   3 seconds
+                + "<urn:Fred>      a       pref:Person ; \n" //
+                + "     pref:hasProperty4 'property4' ; \n" //
+                + "     pref:hasProperty5 'property5' ; \n" //
+                + "}";
+
+        Update update = conn.prepareUpdate(QueryLanguage.SPARQL, sparqlInsert);
+        update.execute();
+        
+        // Query 1: matches only Bob -> one result, two bound vars (?x ?z).
+        String queryString = "PREFIX pref: <http://www.model/pref#> \n" //
+                + "SELECT ?x ?z \n" //
+                + "WHERE { \n"
+                + "  ?x a ?z. \n"
+                + "  ?x pref:hasProperty1 'property1' . \n"//
+                + "  ?x pref:hasProperty2 'property2' . \n"//
+                + "  ?x pref:hasProperty3 'property3' . \n"//
+                + "}";//
+       
+        
+
+        TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, 
queryString);
+        CountingResultHandler tupleHandler = new CountingResultHandler();
+        tupleQuery.evaluate(tupleHandler);
+        log.info("Result count : " + tupleHandler.getCount());
+        Validate.isTrue(tupleHandler.getCount() == 1);
+        Validate.isTrue(tupleHandler.getBsSize() == 2);
+        
+        // Query 2: matches only Fred -> one result, three bound vars.
+        queryString = "PREFIX pref: <http://www.model/pref#> \n" //
+                + "SELECT ?x ?w ?z \n" //
+                + "WHERE { \n"
+                + "  ?x a ?z. \n"
+                + "  ?x pref:hasProperty4 'property4' . \n"//
+                + "  ?x pref:hasProperty5 ?w . \n"//
+                + "}";//
+       
+        
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+        tupleHandler = new CountingResultHandler();
+        tupleQuery.evaluate(tupleHandler);
+        log.info("Result count : " + tupleHandler.getCount());
+        Validate.isTrue(tupleHandler.getCount() == 1);
+        Validate.isTrue(tupleHandler.getBsSize() == 3);
+        
+        
+        // Query 3: joins both subjects through their shared type ?z -> one
+        // result, five bound vars.
+        queryString = "PREFIX pref: <http://www.model/pref#> " 
+                + "SELECT ?v ?w ?x ?y ?z " 
+                + "WHERE { " 
+                + "  ?w a ?z  . " 
+                + "  ?w pref:hasProperty1 ?v . " 
+                + "  ?w pref:hasProperty2 'property2' . " 
+                + "  ?w pref:hasProperty3 'property3' . " 
+                + "  ?x a ?z  . "
+                + "  ?x pref:hasProperty4 'property4' . " 
+                + "  ?x pref:hasProperty5 ?y . " 
+                + "}";
+       
+        
+
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+        tupleHandler = new CountingResultHandler();
+        tupleQuery.evaluate(tupleHandler);
+        log.info("Result count : " + tupleHandler.getCount());
+        Validate.isTrue(tupleHandler.getCount() == 1);
+        Validate.isTrue(tupleHandler.getBsSize() == 5);
+        
+    }
+    
+    
+    // Builds the Accumulo configuration for a mock instance with entity
+    // indexing enabled under the x_test_triplestore_ table prefix.
+    private static Configuration getConf() {
+
+        AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
+
+        conf.setBoolean(ConfigUtils.USE_MOCK_INSTANCE, USE_MOCK_INSTANCE);
+        conf.set(ConfigUtils.USE_ENTITY, "true");
+        conf.set(RdfCloudTripleStoreConfiguration.CONF_TBL_PREFIX, 
RYA_TABLE_PREFIX);
+        conf.set(ConfigUtils.ENTITY_TABLENAME, RYA_TABLE_PREFIX + "entity");
+        conf.set(ConfigUtils.CLOUDBASE_USER, "root");
+        conf.set(ConfigUtils.CLOUDBASE_PASSWORD, "");
+        conf.set(ConfigUtils.CLOUDBASE_INSTANCE, INSTANCE);
+        conf.setInt(ConfigUtils.NUM_PARTITIONS, 3);
+        conf.set(ConfigUtils.CLOUDBASE_AUTHS, AUTHS);
+
+        return conf;
+    }
+    
+
+    // Result handler that counts solutions and remembers the size of the
+    // FIRST binding set seen (later solutions do not change bindingSize).
+    private static class CountingResultHandler implements 
TupleQueryResultHandler {
+        private int count = 0;
+        private int bindingSize = 0;
+        private boolean bsSizeSet = false;
+
+        public int getCount() {
+            return count;
+        }
+        
+        public int getBsSize() {
+            return bindingSize;
+        }
+        
+        // NOTE(review): never called in this file; resetCount below does not
+        // reset the binding-size state.
+        public void resetBsSize() {
+            bindingSize = 0;
+            bsSizeSet = false;
+        }
+
+        public void resetCount() {
+            this.count = 0;
+        }
+
+        @Override
+        public void startQueryResult(List<String> arg0) throws 
TupleQueryResultHandlerException {
+        }
+
+        @Override
+        public void handleSolution(BindingSet arg0) throws 
TupleQueryResultHandlerException {
+            count++;
+            // Latch the width of the first solution only.
+            if(!bsSizeSet) {
+                bindingSize = arg0.size();
+                bsSizeSet = true;
+            }
+            System.out.println(arg0);
+        }
+
+        @Override
+        public void endQueryResult() throws TupleQueryResultHandlerException {
+        }
+
+        @Override
+        public void handleBoolean(boolean arg0) throws 
QueryResultHandlerException {
+          // TODO Auto-generated method stub
+          
+        }
+
+        @Override
+        public void handleLinks(List<String> arg0) throws 
QueryResultHandlerException {
+          // TODO Auto-generated method stub
+          
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/92ddfa59/extras/indexingSailExample/src/main/java/MongoRyaDirectExample.java
----------------------------------------------------------------------
diff --git 
a/extras/indexingSailExample/src/main/java/MongoRyaDirectExample.java 
b/extras/indexingSailExample/src/main/java/MongoRyaDirectExample.java
new file mode 100644
index 0000000..3f02fb2
--- /dev/null
+++ b/extras/indexingSailExample/src/main/java/MongoRyaDirectExample.java
@@ -0,0 +1,288 @@
+import java.util.List;
+
+import mvm.rya.api.RdfCloudTripleStoreConfiguration;
+import mvm.rya.indexing.RyaSailFactory;
+import mvm.rya.indexing.accumulo.ConfigUtils;
+import mvm.rya.mongodb.MongoDBRdfConfiguration;
+
+import org.apache.commons.lang.Validate;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.log4j.Logger;
+import org.openrdf.model.Namespace;
+import org.openrdf.query.BindingSet;
+import org.openrdf.query.MalformedQueryException;
+import org.openrdf.query.QueryEvaluationException;
+import org.openrdf.query.QueryLanguage;
+import org.openrdf.query.QueryResultHandlerException;
+import org.openrdf.query.TupleQuery;
+import org.openrdf.query.TupleQueryResultHandler;
+import org.openrdf.query.TupleQueryResultHandlerException;
+import org.openrdf.query.Update;
+import org.openrdf.query.UpdateExecutionException;
+import org.openrdf.repository.RepositoryException;
+import org.openrdf.repository.RepositoryResult;
+import org.openrdf.repository.sail.SailRepository;
+import org.openrdf.repository.sail.SailRepositoryConnection;
+import org.openrdf.sail.Sail;
+
+public class MongoRyaDirectExample {
+    private static final Logger log = 
Logger.getLogger(MongoRyaDirectExample.class);
+
+    //
+    // Connection configuration parameters
+    //
+
+    private static final boolean PRINT_QUERIES = true;
+    private static final String MONGO_DB = "rya";
+    private static final String MONGO_COLL_PREFIX = "rya_";
+
+    public static void main(String[] args) throws Exception {
+        Configuration conf = getConf();
+        conf.setBoolean(ConfigUtils.DISPLAY_QUERY_PLAN, PRINT_QUERIES);
+  
+        SailRepository repository = null;
+        SailRepositoryConnection conn = null;
+        try {
+            log.info("Connecting to Indexing Sail Repository.");
+            Sail sail = RyaSailFactory.getInstance(conf);
+            repository = new SailRepository(sail);
+            repository.initialize();
+            conn = repository.getConnection();
+
+            long start = System.currentTimeMillis();
+            log.info("Running SPARQL Example: Add and Delete");
+            testAddAndDelete(conn);
+            testAddAndDeleteNoContext(conn);
+            testAddNamespaces(conn);
+            testAddPointAndWithinSearch(conn);
+
+            log.info("TIME: " + (System.currentTimeMillis() - start) / 1000.);
+        } finally {
+            log.info("Shutting down");
+            closeQuietly(conn);
+            closeQuietly(repository);
+        }
+    }
+
+    private static void testAddPointAndWithinSearch(SailRepositoryConnection 
conn) throws Exception {
+
+        String update = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  
"//
+                + "INSERT DATA { " //
+                + "  <urn:feature> a geo:Feature ; " //
+                + "    geo:hasGeometry [ " //
+                + "      a geo:Point ; " //
+                + "      geo:asWKT \"Point(-77.03524 
38.889468)\"^^geo:wktLiteral "//
+                + "    ] . " //
+                + "}";
+
+        Update u = conn.prepareUpdate(QueryLanguage.SPARQL, update);
+        u.execute();
+
+        String queryString;
+        TupleQuery tupleQuery;
+        CountingResultHandler tupleHandler;
+
+        // ring containing point
+        queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
+                + "PREFIX geof: 
<http://www.opengis.net/def/function/geosparql/>  "//
+                + "SELECT ?feature ?point ?wkt " //
+                + "{" //
+                + "  ?feature a geo:Feature . "//
+                + "  ?feature geo:hasGeometry ?point . "//
+                + "  ?point a geo:Point . "//
+                + "  ?point geo:asWKT ?wkt . "//
+                + "  FILTER(geof:sfWithin(?wkt, \"POLYGON((-78 39, -77 39, -77 
38, -78 38, -78 39))\"^^geo:wktLiteral)) " //
+                + "}";//
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+
+        tupleHandler = new CountingResultHandler();
+        tupleQuery.evaluate(tupleHandler);
+        log.info("Result count : " + tupleHandler.getCount());
+        Validate.isTrue(tupleHandler.getCount() >= 1); // may see points from 
during previous runs
+
+        // ring outside point
+        queryString = "PREFIX geo: <http://www.opengis.net/ont/geosparql#>  "//
+                + "PREFIX geof: 
<http://www.opengis.net/def/function/geosparql/>  "//
+                + "SELECT ?feature ?point ?wkt " //
+                + "{" //
+                + "  ?feature a geo:Feature . "//
+                + "  ?feature geo:hasGeometry ?point . "//
+                + "  ?point a geo:Point . "//
+                + "  ?point geo:asWKT ?wkt . "//
+                + "  FILTER(geof:sfWithin(?wkt, \"POLYGON((-77 39, -76 39, -76 
38, -77 38, -77 39))\"^^geo:wktLiteral)) " //
+                + "}";//
+        tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, queryString);
+
+        tupleHandler = new CountingResultHandler();
+        tupleQuery.evaluate(tupleHandler);
+        log.info("Result count : " + tupleHandler.getCount());
+        Validate.isTrue(tupleHandler.getCount() == 0);
+    }
+
+    private static void closeQuietly(SailRepository repository) {
+        if (repository != null) {
+            try {
+                repository.shutDown();
+            } catch (RepositoryException e) {
+                // quietly absorb this exception
+            }
+        }
+    }
+
+    private static void closeQuietly(SailRepositoryConnection conn) {
+        if (conn != null) {
+            try {
+                conn.close();
+            } catch (RepositoryException e) {
+                // quietly absorb this exception
+            }
+        }
+    }
+
+    private static Configuration getConf() {
+
+        Configuration conf = new Configuration();
+        conf.set(ConfigUtils.USE_MONGO, "true");
+        conf.set(MongoDBRdfConfiguration.USE_TEST_MONGO, "true");
+        conf.set(MongoDBRdfConfiguration.MONGO_DB_NAME, MONGO_DB);
+        conf.set(MongoDBRdfConfiguration.MONGO_COLLECTION_PREFIX, 
MONGO_COLL_PREFIX);
+        conf.set(ConfigUtils.GEO_PREDICATES_LIST, 
"http://www.opengis.net/ont/geosparql#asWKT";);
+        conf.set(ConfigUtils.USE_GEO, "true");
+        conf.set(RdfCloudTripleStoreConfiguration.CONF_TBL_PREFIX, 
MONGO_COLL_PREFIX);
+        
+        return conf;
+    }
+
+
+
+    public static void testAddAndDelete(SailRepositoryConnection conn) throws 
MalformedQueryException, RepositoryException,
+            UpdateExecutionException, QueryEvaluationException, 
TupleQueryResultHandlerException {
+
+        // Add data
+        String query = "INSERT DATA\n"//
+                + "{ GRAPH <http://updated/test> {\n"//
+                + "  <http://acme.com/people/Mike> " //
+                + "       <http://acme.com/actions/likes> \"A new book\" ;\n"//
+                + "       <http://acme.com/actions/likes> \"Avocados\" .\n" + 
"} }";
+
+        log.info("Performing Query");
+
+        Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
+        update.execute();
+
+         query = "select ?p ?o { GRAPH <http://updated/test> 
{<http://acme.com/people/Mike> ?p ?o . }}";
+         CountingResultHandler resultHandler = new CountingResultHandler();
+         TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, 
query);
+         tupleQuery.evaluate(resultHandler);
+         log.info("Result count : " + resultHandler.getCount());
+        
+         Validate.isTrue(resultHandler.getCount() == 2);
+        
+         resultHandler.resetCount();
+        
+         // Delete Data
+         query = "DELETE DATA\n" //
+         + "{ GRAPH <http://updated/test> {\n"
+         + "  <http://acme.com/people/Mike> <http://acme.com/actions/likes> 
\"A new book\" ;\n"
+         + "   <http://acme.com/actions/likes> \"Avocados\" .\n" + "}}";
+        
+         update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
+         update.execute();
+        
+         query = "select ?p ?o { GRAPH <http://updated/test> 
{<http://acme.com/people/Mike> ?p ?o . }}";
+         tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+         tupleQuery.evaluate(resultHandler);
+         log.info("Result count : " + resultHandler.getCount());
+        
+         Validate.isTrue(resultHandler.getCount() == 0);
+    }
+
+    public static void testAddNamespaces(SailRepositoryConnection conn) throws 
MalformedQueryException, RepositoryException,
+    UpdateExecutionException, QueryEvaluationException, 
TupleQueryResultHandlerException {
+
+       conn.setNamespace("rya", "http://rya.com";);
+       RepositoryResult<Namespace> results = conn.getNamespaces();
+       for (Namespace space : results.asList()){
+               System.out.println(space.getName() + ", " + space.getPrefix());
+       }
+      }
+
+    public static void testAddAndDeleteNoContext(SailRepositoryConnection 
conn) throws MalformedQueryException, RepositoryException,
+    UpdateExecutionException, QueryEvaluationException, 
TupleQueryResultHandlerException {
+
+       // Add data
+       String query = "INSERT DATA\n"//
+                       + "{ \n"//
+                       + "  <http://acme.com/people/Mike> " //
+                       + "       <http://acme.com/actions/likes> \"A new 
book\" ;\n"//
+                       + "       <http://acme.com/actions/likes> \"Avocados\" 
.\n" + " }";
+
+       log.info("Performing Query");
+
+       Update update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
+       update.execute();
+
+       query = "select ?p ?o {<http://acme.com/people/Mike> ?p ?o . }";
+       CountingResultHandler resultHandler = new CountingResultHandler();
+       TupleQuery tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, 
query);
+       tupleQuery.evaluate(resultHandler);
+       log.info("Result count : " + resultHandler.getCount());
+
+       Validate.isTrue(resultHandler.getCount() == 2);
+
+       resultHandler.resetCount();
+
+       // Delete Data
+       query = "DELETE DATA\n" //
+                       + "{ \n"
+                       + "  <http://acme.com/people/Mike> 
<http://acme.com/actions/likes> \"A new book\" ;\n"
+                       + "   <http://acme.com/actions/likes> \"Avocados\" .\n" 
+ "}";
+
+       update = conn.prepareUpdate(QueryLanguage.SPARQL, query);
+       update.execute();
+
+       query = "select ?p ?o { {<http://acme.com/people/Mike> ?p ?o . }}";
+       tupleQuery = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+       tupleQuery.evaluate(resultHandler);
+       log.info("Result count : " + resultHandler.getCount());
+
+       Validate.isTrue(resultHandler.getCount() == 0);
+    }
+
+    private static class CountingResultHandler implements 
TupleQueryResultHandler {
+        private int count = 0;
+
+        public int getCount() {
+            return count;
+        }
+
+        public void resetCount() {
+            this.count = 0;
+        }
+
+        @Override
+        public void startQueryResult(List<String> arg0) throws 
TupleQueryResultHandlerException {
+        }
+
+        @Override
+        public void handleSolution(BindingSet arg0) throws 
TupleQueryResultHandlerException {
+            count++;
+        }
+
+        @Override
+        public void endQueryResult() throws TupleQueryResultHandlerException {
+        }
+
+        @Override
+        public void handleBoolean(boolean arg0) throws 
QueryResultHandlerException {
+          // TODO Auto-generated method stub
+          
+        }
+
+        @Override
+        public void handleLinks(List<String> arg0) throws 
QueryResultHandlerException {
+          // TODO Auto-generated method stub
+          
+        }
+    }
+}

Reply via email to