http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/92ddfa59/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/ThreshholdPlanSelector.java
----------------------------------------------------------------------
diff --git 
a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/ThreshholdPlanSelector.java
 
b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/ThreshholdPlanSelector.java
new file mode 100644
index 0000000..92aa393
--- /dev/null
+++ 
b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/ThreshholdPlanSelector.java
@@ -0,0 +1,221 @@
+package mvm.rya.indexing.IndexPlanValidator;
+
+import java.util.Iterator;
+import java.util.Set;
+
+import mvm.rya.indexing.external.tupleSet.ExternalTupleSet;
+
+import org.openrdf.query.algebra.BindingSetAssignment;
+import org.openrdf.query.algebra.Filter;
+import org.openrdf.query.algebra.Join;
+import org.openrdf.query.algebra.Projection;
+import org.openrdf.query.algebra.QueryModelNode;
+import org.openrdf.query.algebra.StatementPattern;
+import org.openrdf.query.algebra.TupleExpr;
+import org.openrdf.query.algebra.evaluation.impl.ExternalSet;
+import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
+
+import com.google.common.collect.Sets;
+
+public class ThreshholdPlanSelector implements IndexedQueryPlanSelector {
+
+    private TupleExpr query;
+    private int queryNodeCount = 0;
+
+    public ThreshholdPlanSelector(TupleExpr query) {
+        this.query = query;
+        QueryNodeCount qnc = new QueryNodeCount();
+        query.visit(qnc);
+
+        this.queryNodeCount = qnc.getNodeCount();
+        
+        if(queryNodeCount == 0) {
+            throw new IllegalArgumentException("TupleExpr must contain at 
least one node!");
+        }
+    }
+
+    
+    
+    
+    @Override
+    public TupleExpr getThreshholdQueryPlan(Iterator<TupleExpr> tuples, double 
threshhold, double indexWeight,
+            double commonVarWeight, double extProdWeight) {
+
+        if (threshhold < 0 || threshhold > 1) {
+            throw new IllegalArgumentException("Threshhold must be between 0 
and 1!");
+        }
+        double minCost = Double.MAX_VALUE;
+        TupleExpr minTup = null;
+
+        double tempCost = 0;
+        TupleExpr tempTup = null;
+
+        
+        
+        while (tuples.hasNext()) {
+
+            tempTup = tuples.next();
+            tempCost = getCost(tempTup, indexWeight, commonVarWeight, 
extProdWeight);
+
+            if (tempCost < minCost) {
+                minCost = tempCost;
+                minTup = tempTup;
+            }
+
+            if (minCost <= threshhold) {
+                return minTup;
+            }
+
+        }
+
+        return minTup;
+    }
+
+    public double getCost(TupleExpr te, double indexWeight, double 
commonVarWeight, double dirProdWeight) {
+
+        if (indexWeight + commonVarWeight + dirProdWeight != 1) {
+            throw new IllegalArgumentException("Weights must sum to 1!");
+        }
+        
+        if(te == null) {
+            throw new IllegalArgumentException("TupleExpr cannot be null!");
+        }
+
+        QueryNodeCount qnc = new QueryNodeCount();
+        te.visit(qnc);
+
+        double nodeCount = qnc.getNodeCount();
+        double commonJoinVars = qnc.getCommonJoinVarCount();
+        double joinVars = qnc.getJoinVarCount();
+        double joinCount = qnc.getJoinCount();
+        double dirProdCount = qnc.getDirProdCount();
+        double dirProductScale;
+        
+        if(queryNodeCount > nodeCount) {
+            dirProductScale = 1/((double)(queryNodeCount - nodeCount));
+        } else {
+            dirProductScale = 1/((double)(queryNodeCount - nodeCount + 1));
+        }
+        
+        double joinVarRatio;
+        double dirProductRatio;
+        
+        if(joinVars != 0) {
+            joinVarRatio = (joinVars - commonJoinVars) / joinVars;
+        } else {
+            joinVarRatio = 0;
+        }
+        
+        if(joinCount != 0) {
+            dirProductRatio = dirProdCount / joinCount;
+        } else {
+            dirProductRatio = 0;
+        }
+        
+        
+        double cost = indexWeight * (nodeCount / queryNodeCount) + 
commonVarWeight*joinVarRatio
+                + dirProdWeight *dirProductRatio*dirProductScale;
+        
+//        System.out.println("Tuple is " + te + " and cost is " + cost);
+//        System.out.println("Node count is " + nodeCount + " and query node 
count is " + queryNodeCount);
+//        System.out.println("Common join vars are " + commonJoinVars + " and 
join vars " + joinVars);
+//        System.out.println("Join count is " + joinCount + " and direct prod 
count is " + dirProdCount);
+        
+        return cost;
+    }
+
+    public static class QueryNodeCount extends 
QueryModelVisitorBase<RuntimeException> {
+
+        private int nodeCount = 0;
+        private int commonJoinVars = 0;
+        private int joinVars = 0;
+        private int joinCount = 0;
+        private int dirProdCount = 0;
+
+        public int getCommonJoinVarCount() {
+            return commonJoinVars;
+        }
+
+        public int getJoinVarCount() {
+            return joinVars;
+        }
+
+        public int getNodeCount() {
+            return nodeCount;
+        }
+
+        public int getJoinCount() {
+            return joinCount;
+        }
+
+        public int getDirProdCount() {
+            return dirProdCount;
+        }
+
+        public void meet(Projection node) {
+            node.getArg().visit(this);
+        }
+
+        public void meetNode(QueryModelNode node) {
+            if (node instanceof ExternalTupleSet) {
+                nodeCount += 1;
+                return;
+            }
+            super.meetNode(node);
+            return;
+        }
+
+        @Override
+        public void meet(StatementPattern node) {
+            nodeCount += 1;
+            return;
+        }
+
+        @Override
+        public void meet(Filter node) {
+            nodeCount += 1;
+            node.getArg().visit(this);
+        }
+
+        public void meet(BindingSetAssignment node) {
+            nodeCount += 1;
+            return;
+        }
+
+        @Override
+        public void meet(Join node) {
+
+            int tempCount = 0;
+
+            Set<String> lNames = node.getLeftArg().getAssuredBindingNames();
+            Set<String> rNames = node.getRightArg().getAssuredBindingNames();
+            
+            for(String s: node.getLeftArg().getBindingNames()) {
+                if(s.startsWith("-const-")) {
+                    lNames.remove(s);
+                }
+            }
+            
+            for(String s: node.getRightArg().getBindingNames()) {
+                if(s.startsWith("-const-")) {
+                    rNames.remove(s);
+                }
+            }
+           
+            
+            joinVars += Math.min(lNames.size(), rNames.size());
+            tempCount = Sets.intersection(lNames, rNames).size();
+            if (tempCount == 0) {
+                dirProdCount += 1;
+            } else {
+                commonJoinVars += tempCount;
+            }
+            joinCount += 1;
+
+            super.meet(node);
+
+        }
+
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/92ddfa59/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/TupleExecutionPlanGenerator.java
----------------------------------------------------------------------
diff --git 
a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/TupleExecutionPlanGenerator.java
 
b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/TupleExecutionPlanGenerator.java
new file mode 100644
index 0000000..137323b
--- /dev/null
+++ 
b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/TupleExecutionPlanGenerator.java
@@ -0,0 +1,200 @@
+package mvm.rya.indexing.IndexPlanValidator;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.NoSuchElementException;
+import java.util.Set;
+
+import mvm.rya.indexing.external.tupleSet.ExternalTupleSet;
+import mvm.rya.rdftriplestore.inference.DoNotExpandSP;
+import mvm.rya.rdftriplestore.utils.FixedStatementPattern;
+
+import org.openrdf.query.algebra.BindingSetAssignment;
+import org.openrdf.query.algebra.Filter;
+import org.openrdf.query.algebra.Join;
+import org.openrdf.query.algebra.Projection;
+import org.openrdf.query.algebra.QueryModelNode;
+import org.openrdf.query.algebra.StatementPattern;
+import org.openrdf.query.algebra.TupleExpr;
+import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
+
+import com.beust.jcommander.internal.Lists;
+import com.google.common.collect.Collections2;
+import com.google.common.collect.Maps;
+import com.google.common.collect.Sets;
+
+public class TupleExecutionPlanGenerator implements IndexTupleGenerator {
+
+    
+    
+    @Override
+    public Iterator<TupleExpr> getPlans(Iterator<TupleExpr> indexPlans) {
+
+        final Iterator<TupleExpr> iter = indexPlans;
+        
+        return new Iterator<TupleExpr>() {
+
+            private TupleExpr next = null;
+            private boolean hasNextCalled = false;
+            private boolean isEmpty = false;
+            Iterator<TupleExpr> tuples = null;
+
+            @Override
+            public boolean hasNext() {
+
+                if (!hasNextCalled && !isEmpty) {
+                    if (tuples != null && tuples.hasNext()) {
+                        next = tuples.next();
+                        hasNextCalled = true;
+                        return true;
+                    } else {
+                        while (iter.hasNext()) {
+                            tuples = getPlans(iter.next()).iterator();
+                            if (tuples == null) {
+                                throw new IllegalStateException("Plans cannot 
be null!");
+                            }
+                            next = tuples.next();
+                            hasNextCalled = true;
+                            return true;
+                        }
+                        isEmpty = true;
+                        return false;
+                    }
+                } else if (isEmpty) {
+                    return false;
+                } else {
+                    return true;
+                }
+            }
+
+            @Override
+            public TupleExpr next() {
+
+                if (hasNextCalled) {
+                    hasNextCalled = false;
+                    return next;
+                } else if(isEmpty) {
+                    throw new NoSuchElementException();
+                }else {
+                    if (this.hasNext()) {
+                        hasNextCalled = false;
+                        return next;
+                    } else {
+                        throw new NoSuchElementException();
+                    }
+
+                }
+
+            }
+            
+            @Override
+            public void remove() {
+                throw new UnsupportedOperationException("Cannot delete from 
iterator!");
+            }
+
+        };
+              
+    }
+
+    private List<TupleExpr> getPlans(TupleExpr te) {
+
+        
+        NodeCollector nc = new NodeCollector();
+        te.visit(nc);
+
+        Set<QueryModelNode> nodeSet = nc.getNodeSet();
+        List<Filter> filterList = nc.getFilterSet();
+        Projection projection = nc.getProjection().clone();
+  
+        List<TupleExpr> queryPlans = Lists.newArrayList();
+
+        Collection<List<QueryModelNode>> plans = 
Collections2.permutations(nodeSet);
+
+        for (List<QueryModelNode> p : plans) {
+            if (p.size() == 0) {
+                throw new IllegalArgumentException("Tuple must contain at 
least one node!");
+            } else if (p.size() == 1) {
+                queryPlans.add(te);
+            } else {
+                queryPlans.add(buildTuple(p, filterList, projection));
+            }
+        }
+
+        return queryPlans;
+    }
+
+    private TupleExpr buildTuple(List<QueryModelNode> nodes, List<Filter> 
filters, Projection projection) {
+
+        Projection proj = (Projection)projection.clone();
+        Join join = null;
+
+        join = new Join((TupleExpr) nodes.get(0).clone(), (TupleExpr) 
nodes.get(1).clone());
+
+        for (int i = 2; i < nodes.size(); i++) {
+            join = new Join(join, (TupleExpr) nodes.get(i).clone());
+        }
+
+        if (filters.size() == 0) {
+            proj.setArg(join);
+            return proj;
+        } else {
+            TupleExpr queryPlan = join;
+            for (Filter f : filters) {
+                Filter filt = (Filter) f.clone();
+                filt.setArg(queryPlan);
+                queryPlan = filt;
+            }
+            proj.setArg(queryPlan);
+            return proj;
+        }
+
+    }
+
+    public static class NodeCollector extends 
QueryModelVisitorBase<RuntimeException> {
+
+        private Set<QueryModelNode> nodeSet = Sets.newHashSet();
+        private List<Filter> filterSet = Lists.newArrayList();
+        private Projection projection;
+
+        public Projection getProjection() {
+            return projection;
+        }
+
+        public Set<QueryModelNode> getNodeSet() {
+            return nodeSet;
+        }
+
+        public List<Filter> getFilterSet() {
+            return filterSet;
+        }
+
+        @Override
+        public void meet(Projection node) {
+            projection = node;
+            node.getArg().visit(this);
+        }
+
+        @Override
+        public void meetNode(QueryModelNode node) throws RuntimeException {
+            if (node instanceof ExternalTupleSet || node instanceof 
BindingSetAssignment
+                    || node instanceof StatementPattern) {
+                nodeSet.add(node);
+            }
+            super.meetNode(node);
+        }
+
+        @Override
+        public void meet(Filter node) {
+            filterSet.add(node);
+            node.getArg().visit(this);
+        }
+
+    }
+    
+    
+    
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/92ddfa59/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/TupleReArranger.java
----------------------------------------------------------------------
diff --git 
a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/TupleReArranger.java
 
b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/TupleReArranger.java
new file mode 100644
index 0000000..ac08d97
--- /dev/null
+++ 
b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/TupleReArranger.java
@@ -0,0 +1,328 @@
+package mvm.rya.indexing.IndexPlanValidator;
+
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.NoSuchElementException;
+
+import mvm.rya.indexing.external.tupleSet.ExternalTupleSet;
+import mvm.rya.rdftriplestore.inference.DoNotExpandSP;
+import mvm.rya.rdftriplestore.utils.FixedStatementPattern;
+
+import org.openrdf.query.algebra.Filter;
+import org.openrdf.query.algebra.Join;
+import org.openrdf.query.algebra.StatementPattern;
+import org.openrdf.query.algebra.TupleExpr;
+import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
+
+import com.beust.jcommander.internal.Lists;
+import com.google.common.collect.Collections2;
+import com.google.common.collect.Maps;
+
+
// A given TupleExpr can be broken up into "join segments", which are sections of the
// TupleExpr where nodes can be freely exchanged. This class creates a list of permuted
// TupleExpr from a specified TupleExpr by permuting the nodes in each join segment.
//
// NOTE(review): joinArgs/filterArgs are static mutable state shared across all callers,
// so this class does not look thread-safe and concurrent use of getPlans /
// getTupleReOrderings would interleave state — confirm callers are single-threaded.
public class TupleReArranger {

    // Maps the first Join of each join segment to all permutations of the non-join
    // nodes in that segment (populated by NodeCollector).
    private static Map<Join, List<List<TupleExpr>>> joinArgs;
    // Maps the first Join of each join segment to the Filters found in that segment.
    private static Map<Join, List<Filter>> filterArgs;

    // Lazily expands each incoming plan into all of its join-segment reorderings,
    // streaming them out one at a time.
    public static Iterator<TupleExpr> getPlans(Iterator<TupleExpr> indexPlans) {

        final Iterator<TupleExpr> iter = indexPlans;

        return new Iterator<TupleExpr>() {

            private TupleExpr next = null;          // buffered element produced by hasNext()
            private boolean hasNextCalled = false;  // true when `next` is unconsumed
            private boolean isEmpty = false;        // true once everything is exhausted
            Iterator<TupleExpr> tuples = null;      // reorderings of the current plan

            @Override
            public boolean hasNext() {

                if (!hasNextCalled && !isEmpty) {
                    if (tuples != null && tuples.hasNext()) {
                        next = tuples.next();
                        hasNextCalled = true;
                        return true;
                    } else {
                        // Advance to the next base plan and start streaming its
                        // reorderings. (Loop body always returns on first pass.)
                        while (iter.hasNext()) {
                            tuples = getTupleReOrderings(iter.next()).iterator();
                            if (tuples == null) {
                                throw new IllegalStateException("Plans cannot be null!");
                            }
                            next = tuples.next();
                            hasNextCalled = true;
                            return true;
                        }
                        isEmpty = true;
                        return false;
                    }
                } else if (isEmpty) {
                    return false;
                } else {
                    return true;
                }
            }

            @Override
            public TupleExpr next() {

                if (hasNextCalled) {
                    hasNextCalled = false;
                    return next;
                } else if (isEmpty) {
                    throw new NoSuchElementException();
                } else {
                    if (this.hasNext()) {
                        hasNextCalled = false;
                        return next;
                    } else {
                        throw new NoSuchElementException();
                    }
                }
            }

            @Override
            public void remove() {
                throw new UnsupportedOperationException("Cannot delete from iterator!");
            }
        };
    }

    // Given a TupleExpr, return the list of join-segment permuted TupleExprs.
    // Resets the shared static maps, collects segment permutations with
    // NodeCollector, then materializes one plan per combined reordering.
    public static List<TupleExpr> getTupleReOrderings(TupleExpr te) {

        joinArgs = Maps.newHashMap();
        filterArgs = Maps.newHashMap();

        NodeCollector nc = new NodeCollector();
        te.visit(nc);
        joinArgs = nc.getPerms();
        List<Join> joins = Lists.newArrayList(joinArgs.keySet());

        return getPlans(getReOrderings(joins), te);

    }

    // Iterates through the reOrder maps, and for each reOrder map builds a new,
    // reordered TupleExpr by cloning the original and splicing in the ordering
    // with PermInserter.
    private static List<TupleExpr> getPlans(List<Map<Join, List<TupleExpr>>> reOrderings, TupleExpr te) {

        List<TupleExpr> queryPlans = Lists.newArrayList();
        PermInserter pm = new PermInserter();

        for (Map<Join, List<TupleExpr>> order : reOrderings) {
            TupleExpr clone = te.clone();
            pm.setReOrderMap(order);
            clone.visit(pm);
            queryPlans.add(clone);
        }

        return queryPlans;
    }

    // Recursive method which produces a list of maps. Each map associates a join with
    // a list of the non-join arguments below it contained in the same join segment.
    // The list represents an ordering of the non-join arguments, and creating a
    // TupleExpr from this map yields a new TupleExpr whose non-join arguments are
    // permuted.
    private static List<Map<Join, List<TupleExpr>>> getReOrderings(List<Join> joins) {
        Map<Join, List<TupleExpr>> reOrder = Maps.newHashMap();
        List<Map<Join, List<TupleExpr>>> reOrderings = Lists.newArrayList();
        getReOrderings(joins, reOrder, reOrderings);
        return reOrderings;

    }

    // Recursive worker: takes the Cartesian product of the per-segment permutations
    // stored in joinArgs, accumulating one map per combination into reOrderings.
    private static void getReOrderings(List<Join> joins, Map<Join, List<TupleExpr>> reOrder,
            List<Map<Join, List<TupleExpr>>> reOrderings) {

        if (joins.isEmpty()) {
            reOrderings.add(reOrder);
            return;
        }

        List<Join> joinsCopy = Lists.newArrayList(joins);
        Join join = joinsCopy.remove(0);
        List<List<TupleExpr>> joinArgPerms = joinArgs.get(join);
        for (List<TupleExpr> tupList : joinArgPerms) {
            // Copy the partial map so each branch of the recursion is independent.
            Map<Join, List<TupleExpr>> newReOrder = Maps.newHashMap(reOrder);
            newReOrder.put(join, tupList);
            getReOrderings(joinsCopy, newReOrder, reOrderings);
        }

        return;

    }

    // Creates a map which associates each first join of a TupleExpr join segment with
    // all permutations of the non-join nodes after it. More specifically, each join is
    // associated with a list of TupleExpr lists, where each list represents an ordering
    // of the non-join nodes following the associated join.
    private static class NodeCollector extends QueryModelVisitorBase<RuntimeException> {

        // Filters encountered while flattening the current segment; shared with
        // getJoinArgs via static field (see thread-safety NOTE on the class).
        private static List<Filter> filterList;

        public Map<Join, List<List<TupleExpr>>> getPerms() {
            return joinArgs;
        }

        @Override
        public void meet(Join node) {

            filterList = Lists.newArrayList();

            List<TupleExpr> args = Lists.newArrayList();
            args = getJoinArgs(node, args);
            List<List<TupleExpr>> argPerms = Lists.newArrayList(Collections2.permutations(args));
            joinArgs.put(node, argPerms);
            filterArgs.put(node, filterList);

            // Recurse only into composite arguments; statement patterns and
            // external tuple sets are segment leaves.
            for (TupleExpr te : args) {
                if (!(te instanceof StatementPattern) && !(te instanceof ExternalTupleSet)) {
                    te.visit(this);
                }
            }

        }

        // Get all non-join nodes below tupleExpr in the same join segment.
        private static List<TupleExpr> getJoinArgs(TupleExpr tupleExpr, List<TupleExpr> joinArgs) {
            if (tupleExpr instanceof Join) {
                // Skip inference-generated joins (FixedStatementPattern/DoNotExpandSP);
                // they must not be flattened into the segment.
                if (!(((Join) tupleExpr).getLeftArg() instanceof FixedStatementPattern)
                        && !(((Join) tupleExpr).getRightArg() instanceof DoNotExpandSP)) {
                    Join join = (Join) tupleExpr;
                    getJoinArgs(join.getLeftArg(), joinArgs);
                    getJoinArgs(join.getRightArg(), joinArgs);
                } // assumes all filters occur above first join of segment --
                  // this should be the state
                  // after PrecompJoinOptimizer is called
            } else if (tupleExpr instanceof Filter) {
                filterList.add((Filter) tupleExpr);
                getJoinArgs(((Filter) tupleExpr).getArg(), joinArgs);
            } else {
                joinArgs.add(tupleExpr);
            }

            return joinArgs;
        }

    }

    // For a given reOrder map, searches through the TupleExpr and places each
    // reordered collection of nodes at the appropriate join.
    private static class PermInserter extends QueryModelVisitorBase<RuntimeException> {

        private Map<Join, List<TupleExpr>> reOrderMap = Maps.newHashMap();

        public void setReOrderMap(Map<Join, List<TupleExpr>> reOrderMap) {
            this.reOrderMap = reOrderMap;
        }

        @Override
        public void meet(Join node) {

            List<TupleExpr> reOrder = reOrderMap.get(node);
            if (reOrder != null) {
                // Rebuild this segment with the permuted ordering and its filter
                // chain, then splice it into the tree in place of the old join.
                List<Filter> filterList = Lists.newArrayList(filterArgs.get(node));
                node.replaceWith(getNewJoin(reOrder, getFilterChain(filterList)));

                for (TupleExpr te : reOrder) {
                    if (!(te instanceof StatementPattern) && !(te instanceof ExternalTupleSet)) {
                        te.visit(this);
                    }
                }
            }
            super.meet(node);
        }
    }

    // Chain filters together and return the front and back of the chain
    // (a 0-, 1-, or 2-element list: [top] or [top, bottom]).
    private static List<TupleExpr> getFilterChain(List<Filter> filters) {
        List<TupleExpr> filterTopBottom = Lists.newArrayList();
        Filter filterChainTop = null;
        Filter filterChainBottom = null;

        for (Filter filter : filters) {
            if (filterChainTop == null) {
                filterChainTop = filter.clone();
            } else if (filterChainBottom == null) {
                filterChainBottom = filter.clone();
                filterChainTop.setArg(filterChainBottom);
            } else {
                Filter newFilter = filter.clone();
                filterChainBottom.setArg(newFilter);
                filterChainBottom = newFilter;
            }
        }
        if (filterChainTop != null) {
            filterTopBottom.add(filterChainTop);
        }
        if (filterChainBottom != null) {
            filterTopBottom.add(filterChainBottom);
        }
        return filterTopBottom;
    }

    // Build a new Join node given the remaining joinArgs and the chain of filters.
    // The filter chain (if any) is attached above all but the first argument, and
    // the result is joined with that first argument.
    private static TupleExpr getNewJoin(List<TupleExpr> args, List<TupleExpr> filterChain) {
        TupleExpr newJoin;
        List<TupleExpr> joinArgs = Lists.newArrayList(args);

        if (joinArgs.size() > 1) {
            if (filterChain.size() > 0) {
                TupleExpr finalJoinArg = joinArgs.remove(0).clone();
                TupleExpr tempJoin;
                TupleExpr temp = filterChain.get(0);

                if (joinArgs.size() > 1) {
                    tempJoin = new Join(joinArgs.remove(0).clone(), joinArgs.remove(0).clone());
                    for (TupleExpr te : joinArgs) {
                        tempJoin = new Join(tempJoin, te.clone());
                    }
                } else {
                    tempJoin = joinArgs.remove(0).clone();
                }

                // Hang the join under the bottom of the filter chain (or under
                // the single filter when the chain has length one).
                if (filterChain.size() == 1) {
                    ((Filter) temp).setArg(tempJoin);
                } else {
                    ((Filter) filterChain.get(1)).setArg(tempJoin);
                }
                newJoin = new Join(temp, finalJoinArg);
            } else {
                newJoin = new Join(joinArgs.remove(0).clone(), joinArgs.remove(0).clone());

                for (TupleExpr te : joinArgs) {
                    newJoin = new Join(newJoin, te.clone());
                }
            }
        } else if (joinArgs.size() == 1) {
            if (filterChain.size() > 0) {
                newJoin = filterChain.get(0);
                if (filterChain.size() == 1) {
                    ((Filter) newJoin).setArg(joinArgs.get(0).clone());
                } else {
                    ((Filter) filterChain.get(1)).setArg(joinArgs.get(0).clone());
                }
            } else {
                newJoin = joinArgs.get(0).clone();
            }
        } else {
            throw new IllegalStateException("JoinArgs size cannot be zero.");
        }
        return newJoin;
    }

}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/92ddfa59/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/TupleValidator.java
----------------------------------------------------------------------
diff --git 
a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/TupleValidator.java
 
b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/TupleValidator.java
new file mode 100644
index 0000000..afba7fe
--- /dev/null
+++ 
b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/TupleValidator.java
@@ -0,0 +1,15 @@
+package mvm.rya.indexing.IndexPlanValidator;
+
+import java.util.Iterator;
+import java.util.List;
+
+import org.openrdf.query.algebra.TupleExpr;
+
/**
 * Checks candidate query plans ({@link TupleExpr}) against an
 * implementation-specific validity criterion, and filters iterators of
 * candidate plans down to the valid ones.
 */
public interface TupleValidator {

    /**
     * @param te the query plan to check
     * @return true if the plan is valid according to this validator
     */
    public boolean isValid(TupleExpr te);

    /**
     * @param tupleList candidate query plans
     * @return an iterator over only those plans for which {@link #isValid} holds
     */
    public Iterator<TupleExpr> getValidTuples(Iterator<TupleExpr> tupleList);

}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/92ddfa59/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/ValidIndexCombinationGenerator.java
----------------------------------------------------------------------
diff --git 
a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/ValidIndexCombinationGenerator.java
 
b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/ValidIndexCombinationGenerator.java
new file mode 100644
index 0000000..c278fcc
--- /dev/null
+++ 
b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/ValidIndexCombinationGenerator.java
@@ -0,0 +1,651 @@
+package mvm.rya.indexing.IndexPlanValidator;
+
+import java.util.Collections;
+import java.util.Comparator;
+import java.util.Iterator;
+import java.util.List;
+import java.util.NoSuchElementException;
+import java.util.Set;
+
+import mvm.rya.indexing.external.tupleSet.ExternalTupleSet;
+import mvm.rya.indexing.external.tupleSet.SimpleExternalTupleSet;
+
+import org.openrdf.query.MalformedQueryException;
+import org.openrdf.query.algebra.Filter;
+import org.openrdf.query.algebra.Projection;
+import org.openrdf.query.algebra.QueryModelNode;
+import org.openrdf.query.algebra.StatementPattern;
+import org.openrdf.query.algebra.TupleExpr;
+import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
+import org.openrdf.query.parser.ParsedQuery;
+import org.openrdf.query.parser.sparql.SPARQLParser;
+import com.google.common.base.Joiner;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Sets;
+
+public class ValidIndexCombinationGenerator {
+    
+    
+    private TupleExpr query;
+    private Set<String> invalidCombos = Sets.newTreeSet();
+    private Set<QueryModelNode> spFilterSet;
+    
+    
+    public ValidIndexCombinationGenerator(TupleExpr query) {
+        this.query = query;
+        SpFilterCollector sfc = new SpFilterCollector();
+        query.visit(sfc);
+        spFilterSet = sfc.getSpFilterSet();
+    }
+    
+    
+    
+    
+    public Iterator<List<ExternalTupleSet>> 
getValidIndexCombos(List<ExternalTupleSet> indexSet) {
+
+        Collections.shuffle(indexSet);
+        final List<ExternalTupleSet> list = indexSet;
+        final Iterator<List<Integer>> iter = getValidCombos(list);
+
+        return new Iterator<List<ExternalTupleSet>>() {
+
+            private List<ExternalTupleSet> next = null;
+            private List<Integer> nextCombo = null;
+            private boolean hasNextCalled = false;
+            private boolean isEmpty = false;
+
+            @Override
+            public boolean hasNext() {
+
+                if (!hasNextCalled && !isEmpty) {
+                    if (!iter.hasNext()) {
+                        isEmpty = true;
+                        return false;
+                    } else {
+                        nextCombo = iter.next();
+                        List<ExternalTupleSet> indexCombo = 
Lists.newArrayList();
+                        for (Integer i : nextCombo) {
+                            indexCombo.add(list.get(i));
+                        }
+                        next = indexCombo;
+                        hasNextCalled = true;
+                        return true;
+
+                    }
+
+                } else if (isEmpty) {
+                    return false;
+                } else {
+                    return true;
+                }
+            }
+
+            @Override
+            public List<ExternalTupleSet> next() {
+
+                if (hasNextCalled) {
+                    hasNextCalled = false;
+                    return next;
+                } else if(isEmpty) {
+                    throw new NoSuchElementException();
+                }else {
+                    if (this.hasNext()) {
+                        hasNextCalled = false;
+                        return next;
+                    } else {
+                        throw new NoSuchElementException();
+                    }
+                }
+            }
+
+            @Override
+            public void remove() {
+
+                throw new UnsupportedOperationException("Cannot delete from 
iterator!");
+
+            }
+
+        };
+
+    }
+    
+    
+    
+    private Iterator<List<Integer>> getValidCombos(List<ExternalTupleSet> 
indexList) {
+        
+        
+        final List<ExternalTupleSet> list = indexList;
+        final int indexSize = list.size();
+        final Iterator<List<Integer>> iter = getCombos(indexSize);
+        
+        
+        return new Iterator<List<Integer>>() {
+
+            private List<Integer> next = null;
+            private boolean hasNextCalled = false;
+            private boolean isEmpty = false;
+
+            @Override
+            public boolean hasNext() {
+                if (!hasNextCalled && !isEmpty) {
+
+                    while (iter.hasNext()) {
+                        List<Integer> tempNext = iter.next();
+                        if (isValid(tempNext, list)) {
+                            next = tempNext;
+                            hasNextCalled = true;
+                            return true;
+                        }
+
+                    }
+
+                    isEmpty = true;
+                    return false;
+
+                } else if (isEmpty) {
+                    return false;
+                } else {
+                    return true;
+                }
+            }
+
+            @Override
+            public List<Integer> next() {
+
+                if (hasNextCalled) {
+                    hasNextCalled = false;
+                    return next;
+                } else if (isEmpty) {
+                    throw new NoSuchElementException();
+                } else {
+                    if (this.hasNext()) {
+                        hasNextCalled = false;
+                        return next;
+                    } else {
+                        throw new NoSuchElementException();
+                    }
+
+                }
+
+            }
+
+            @Override
+            public void remove() {
+
+                throw new UnsupportedOperationException("Cannot delete from 
iterator!");
+
+            }
+
+        };
+    }
+    
+    
+    
+    
+    
+    
+    private Iterator<List<Integer>> getCombos(int indexListSize) {
+
+        final int indexSize = indexListSize;
+        final int maxSubListSize = spFilterSet.size() / 2;
+
+        return new Iterator<List<Integer>>() {
+
+            private List<Integer> next = null;
+            private boolean hasNextCalled = false;
+            private boolean isEmpty = false;
+            private int subListSize = Math.min(maxSubListSize, indexSize) + 1;
+            Iterator<List<Integer>> subList = null;
+
+            @Override
+            public boolean hasNext() {
+
+                if (!hasNextCalled && !isEmpty) {
+                    if (subList != null && subList.hasNext()) {
+                        next = subList.next();
+                        hasNextCalled = true;
+                        return true;
+                    } else {
+                        subListSize--;
+                        if (subListSize == 0) {
+                            isEmpty = true;
+                            return false;
+                        }
+                        subList = getCombos(subListSize, indexSize);
+                        if (subList == null) {
+                            throw new IllegalStateException("Combos cannot be 
null!");
+                        }
+                        next = subList.next();
+                        hasNextCalled = true;
+                        return true;
+
+                    }
+                } else if (isEmpty) {
+                    return false;
+                } else {
+                    return true;
+                }
+            }
+
+            @Override
+            public List<Integer> next() {
+
+                if (hasNextCalled) {
+                    hasNextCalled = false;
+                    return next;
+                } else if (isEmpty) {
+                    throw new NoSuchElementException();
+                } else {
+                    if (this.hasNext()) {
+                        hasNextCalled = false;
+                        return next;
+                    } else {
+                        throw new NoSuchElementException();
+                    }
+
+                }
+
+            }
+
+            @Override
+            public void remove() {
+                throw new UnsupportedOperationException("Cannot delete from 
iterator!");
+            }
+
+        };
+
+    }
+    
+    
+    
+    private Iterator<List<Integer>> getCombos(int subListSize, int 
indexListSize) {
+        
+        if(subListSize > indexListSize) {
+            throw new IllegalArgumentException("Sublist size must be less than 
or equal to list size!");
+        }
+        
+        final int subSize = subListSize;
+        final int indexSize = indexListSize;
+        
+        return new Iterator<List<Integer>>() {
+
+            private List<Integer> next = null;
+            private List<Integer> tempList = Lists.newArrayList();
+            private boolean calledHasNext = false;
+            private boolean isEmpty = false;
+            
+            @Override
+            public boolean hasNext() {
+
+                if (!calledHasNext && !isEmpty) {
+                    if (next == null) {
+                        for (int i = 0; i < subSize; i++) {
+                            tempList.add(i);
+                        }
+                        next = tempList;
+                        calledHasNext = true;
+                        return true;
+                    } else {
+                        next = getNext(next, indexSize - 1);
+                        if (next == null) {
+                            isEmpty = true;
+                            return false;
+                        } else {
+                            calledHasNext = true;
+                            return true;
+                        }
+
+                    }
+                } else if(isEmpty) {  
+                    return false;
+                } else {
+                    return true;
+                }
+
+            }
+
+            @Override
+            public List<Integer> next() {
+
+                if (calledHasNext) {
+                    calledHasNext = false;
+                    return next;
+                } else if (isEmpty) {
+                    throw new NoSuchElementException();
+                } else {
+                    if (this.hasNext()) {
+                        calledHasNext = false;
+                        return next;
+                    } else {
+                        throw new NoSuchElementException();
+                    }
+                }
+            }
+            @Override
+            public void remove() {
+                throw new UnsupportedOperationException();
+                
+            }
+            
+            
+            
+        };
+    }
+    
+
+    
+    
+    
+    
+    private List<Integer> getNext(List<Integer> prev, int maxInt) {
+        
+        List<Integer> returnList = Lists.newArrayList();
+        int size = prev.size();
+        int incrementPos = -1;
+        int incrementVal = 0;
+        
+        for(int i = 0; i < size; i++) {
+            if(prev.get(size-(i+1)) != maxInt - i) {
+                incrementPos = size - (i+1);
+                break;
+            }
+        }
+        
+        if (incrementPos == -1) {
+            return null;
+        } else {
+
+            incrementVal = prev.get(incrementPos);
+            for (int i = 0; i < incrementPos; i++) {
+                returnList.add(prev.get(i));
+            }
+
+            for (int j = incrementPos; j < size; j++) {
+                returnList.add(++incrementVal);
+            }
+
+            return returnList;
+        }
+    }
+    
+    
+    
+    
+    private boolean isValid(List<Integer> combo, List<ExternalTupleSet> 
indexList) {
+        
+        String s1 = Joiner.on("\u0000").join(combo).trim();
+        
+        if(invalidCombos.contains(s1)) {
+            return false;
+        } else {
+            int valid = indicesDisjoint(combo, indexList);
+            
+            if (valid >= 0) {
+                String s2 = "";
+                for (int i = 0; i < valid + 1; i++) {
+                    if (s2.length() == 0) {
+                        s2 = s2 + combo.get(i);
+                    } else {
+                        s2 = s2 + "\u0000" + combo.get(i);
+                    }
+                }
+                invalidCombos.add(s2);
+
+                for (int i = valid + 1; i < combo.size(); i++) {
+                    s2 = s2 + "\u0000" + combo.get(i);
+                    invalidCombos.add(s2);
+                }
+
+                return false;
+            } else {
+                return true;
+            }
+        }
+        
+        
+    }
+    
+    
+    
+    private int indicesDisjoint(List<Integer> combo, List<ExternalTupleSet> 
indexList) {
+        
+        Set<QueryModelNode> indexNodes = Sets.newHashSet();
+        Set<QueryModelNode> tempNodes;
+        TupleExpr temp;
+        
+        
+        int j = 0;
+        for(Integer i: combo) {
+            temp = indexList.get(i).getTupleExpr();
+            SpFilterCollector spf = new SpFilterCollector();
+            temp.visit(spf);
+            tempNodes = spf.getSpFilterSet();
+            if(Sets.intersection(indexNodes, tempNodes).size() == 0) {
+                indexNodes = Sets.union(indexNodes, tempNodes);
+                if(indexNodes.size() > spFilterSet.size()) {
+                    return j;
+                }
+            } else {
+                return j;
+            }
+            j++;
+        }
+        
+        return -1;
+    }
+    
+    
+    
+    
+    public static void main(String[] args) {
+        
+        
+        String q1 = ""//
+                + "SELECT ?f ?m ?d " //
+                + "{" //
+                + "  ?f a ?m ."//
+                + "  ?m <http://www.w3.org/2000/01/rdf-schema#label> ?d ."//
+                + "  ?d <uri:talksTo> ?f . "//
+                + "  ?f <uri:hangOutWith> ?m ." //
+                + "  ?m <uri:hangOutWith> ?d ." //
+                + "  ?f <uri:associatesWith> ?m ." //
+                + "  ?m <uri:associatesWith> ?d ." //
+                + "}";//
+        
+        
+        String q2 = ""//
+                + "SELECT ?t ?s ?u " //
+                + "{" //
+                + "  ?s a ?t ."//
+                + "  ?t <http://www.w3.org/2000/01/rdf-schema#label> ?u ."//
+                + "  ?u <uri:talksTo> ?s . "//
+                + "}";//
+        
+        
+        String q3 = ""//
+                + "SELECT ?s ?t ?u " //
+                + "{" //
+                + "  ?s <uri:hangOutWith> ?t ." //
+                + "  ?t <uri:hangOutWith> ?u ." //
+                + "}";//
+        
+        String q4 = ""//
+                + "SELECT ?s ?t ?u " //
+                + "{" //
+                + "  ?s <uri:associatesWith> ?t ." //
+                + "  ?t <uri:associatesWith> ?u ." //
+                + "}";//
+        
+        
+        String q5 = ""//
+                + "SELECT ?t ?s ?u " //
+                + "{" //
+                + "  ?s a ?t ."//
+                + "  ?t <http://www.w3.org/2000/01/rdf-schema#label> ?u ."//
+                + "  ?u <uri:talksTo> ?s . "//
+                + "  ?s <uri:hangOutWith> ?t ." //
+                + "  ?t <uri:hangOutWith> ?u ." //
+                + "}";//
+        
+        String q6 = ""//
+                + "SELECT ?s ?t ?u " //
+                + "{" //
+                + "  ?s <uri:associatesWith> ?t ." //
+                + "  ?t <uri:associatesWith> ?u ." //
+                + "  ?s <uri:hangOutWith> ?t ." //
+                + "  ?t <uri:hangOutWith> ?u ." //
+                + "}";//
+        
+        
+        String q7 = ""//
+                + "SELECT ?s ?t ?u " //
+                + "{" //
+                + "  ?s <uri:associatesWith> ?t ." //
+                + "  ?t <uri:associatesWith> ?u ." //
+                + "  ?t <uri:hangOutWith> ?u ." //
+                + "}";//
+        
+        
+        
+        String q8 = ""//
+                + "SELECT ?t ?s ?u " //
+                + "{" //
+                + "  ?s a ?t ."//
+                + "  ?t <http://www.w3.org/2000/01/rdf-schema#label> ?u ."//
+                + "  ?u <uri:talksTo> ?s . "//
+                + "  ?s <uri:associatesWith> ?t ." //
+                + "}";//
+        
+        
+        String q9 = ""//
+                + "SELECT ?t ?s ?u " //
+                + "{" //
+                + "  ?s a ?t ."//
+                + "  ?t <http://www.w3.org/2000/01/rdf-schema#label> ?u ."//
+                + "}";//
+        
+        
+        
+        
+        
+        
+        
+        
+
+        SPARQLParser parser = new SPARQLParser();
+        ParsedQuery pq1 = null;
+        ParsedQuery pq2 = null;
+        ParsedQuery pq3 = null;
+        ParsedQuery pq4 = null;
+        ParsedQuery pq5 = null;
+        ParsedQuery pq6 = null;
+        ParsedQuery pq7 = null;
+        ParsedQuery pq8 = null;
+        ParsedQuery pq9 = null;
+        
+        SimpleExternalTupleSet extTup1 = null;
+        SimpleExternalTupleSet extTup2 = null;
+        SimpleExternalTupleSet extTup3 = null;
+        SimpleExternalTupleSet extTup4 = null;
+        SimpleExternalTupleSet extTup5 = null;
+        SimpleExternalTupleSet extTup6 = null;
+        SimpleExternalTupleSet extTup7 = null;
+        SimpleExternalTupleSet extTup8 = null;
+        
+        
+        
+        
+        
+        try {
+            pq1 = parser.parseQuery(q1, null);
+            pq2 = parser.parseQuery(q2, null);
+            pq3 = parser.parseQuery(q3, null);
+            pq4 = parser.parseQuery(q4, null);
+            pq5 = parser.parseQuery(q5, null);
+            pq6 = parser.parseQuery(q6, null);
+            pq7 = parser.parseQuery(q7, null);
+            pq8 = parser.parseQuery(q8, null);
+            pq9 = parser.parseQuery(q9, null);
+           
+
+            extTup1 = new SimpleExternalTupleSet((Projection) 
pq2.getTupleExpr());
+            extTup2 = new SimpleExternalTupleSet((Projection) 
pq3.getTupleExpr());
+            extTup3 = new SimpleExternalTupleSet((Projection) 
pq4.getTupleExpr());
+            extTup4 = new SimpleExternalTupleSet((Projection) 
pq5.getTupleExpr());
+            extTup5 = new SimpleExternalTupleSet((Projection) 
pq6.getTupleExpr());
+            extTup6 = new SimpleExternalTupleSet((Projection) 
pq7.getTupleExpr());
+            extTup7 = new SimpleExternalTupleSet((Projection) 
pq8.getTupleExpr());
+            extTup8 = new SimpleExternalTupleSet((Projection) 
pq9.getTupleExpr());
+            
+          
+        } catch (MalformedQueryException e) {
+            // TODO Auto-generated catch block
+            e.printStackTrace();
+        }
+        
+        List<ExternalTupleSet> indexList = Lists.newArrayList();
+        indexList.add(extTup1);
+        indexList.add(extTup2);
+        indexList.add(extTup3);
+        indexList.add(extTup4);
+        indexList.add(extTup5);
+        indexList.add(extTup6);
+        indexList.add(extTup7);
+        indexList.add(extTup8);
+        
+        
+        ValidIndexCombinationGenerator vic = new 
ValidIndexCombinationGenerator(pq1.getTupleExpr());
+        Iterator<List<ExternalTupleSet>> combos = 
vic.getValidIndexCombos(indexList);
+        int size = 0;
+        while(combos.hasNext()) {
+            combos.hasNext();
+            size++;
+            List<ExternalTupleSet> eSet = combos.next();
+            System.out.println("********************************************");
+            for(ExternalTupleSet e: eSet) {
+                System.out.println(e.getTupleExpr());
+            }
+            System.out.println("********************************************");
+        }
+        
+        System.out.println("size is " + size + " has next " + 
combos.hasNext());
+    }
+    
+    
+    
+    
+    
+    private static class SpFilterCollector extends 
QueryModelVisitorBase<RuntimeException> {
+
+        private Set<QueryModelNode> spFilterSet = Sets.newHashSet();
+
+        
+        public int getNodeNumber() {
+            return spFilterSet.size();
+        }
+        
+        
+        public Set<QueryModelNode> getSpFilterSet() {
+            return spFilterSet;
+        }
+        
+        
+        @Override
+        public void meet(StatementPattern node) {
+            
+            spFilterSet.add(node);
+            return;
+            
+        }
+        
+        
+        @Override
+        public void meet(Filter node) {
+            
+            spFilterSet.add(node.getCondition());
+            node.getArg().visit(this);
+        }  
+        
+
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/92ddfa59/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/VarConstantIndexListPruner.java
----------------------------------------------------------------------
diff --git 
a/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/VarConstantIndexListPruner.java
 
b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/VarConstantIndexListPruner.java
new file mode 100644
index 0000000..b110522
--- /dev/null
+++ 
b/extras/indexing/src/main/java/mvm/rya/indexing/IndexPlanValidator/VarConstantIndexListPruner.java
@@ -0,0 +1,151 @@
+package mvm.rya.indexing.IndexPlanValidator;
+
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import mvm.rya.indexing.external.tupleSet.ExternalTupleSet;
+
+import org.openrdf.query.algebra.Filter;
+import org.openrdf.query.algebra.StatementPattern;
+import org.openrdf.query.algebra.TupleExpr;
+import org.openrdf.query.algebra.ValueConstant;
+import org.openrdf.query.algebra.Var;
+import org.openrdf.query.algebra.helpers.QueryModelVisitorBase;
+
+import com.beust.jcommander.internal.Maps;
+import com.google.common.collect.Sets;
+
+
+
+
+public class VarConstantIndexListPruner implements IndexListPruner {
+
+    private Map<String, Integer> queryConstantMap;
+    private int querySpCount;
+    private int queryFilterCount;
+
+    public VarConstantIndexListPruner(TupleExpr te) {
+
+        ConstantCollector cc = new ConstantCollector();
+        te.visit(cc);
+        this.queryConstantMap = cc.getConstantMap();
+        querySpCount = cc.getSpCount();
+        queryFilterCount = cc.getFilterCount();
+    }
+
+    public Set<ExternalTupleSet> getRelevantIndices(List<ExternalTupleSet> 
indexList) {
+
+        Set<ExternalTupleSet> relIndexSet = Sets.newHashSet();
+
+        for (ExternalTupleSet e : indexList) {
+
+            if (isRelevant(e.getTupleExpr())) {
+                relIndexSet.add(e);
+            }
+
+        }
+
+        return relIndexSet;
+    }
+
+    private boolean isRelevant(TupleExpr index) {
+
+        ConstantCollector cc = new ConstantCollector();
+        index.visit(cc);
+        
+        Map<String, Integer> indexConstantMap = cc.getConstantMap();
+        int indexSpCount = cc.getSpCount();
+        int indexFilterCount = cc.getFilterCount();
+        Set<String> indexConstants = indexConstantMap.keySet();
+
+        if ((indexSpCount > querySpCount) || (indexFilterCount > 
queryFilterCount) 
+                || !(Sets.intersection(indexConstants, 
queryConstantMap.keySet()).equals(indexConstants))) {
+            return false;
+        }
+
+        for (String s : indexConstants) {
+            if (indexConstantMap.get(s) > queryConstantMap.get(s)) {
+                return false;
+            }
+        }
+
+        return true;
+    }
+
+
+    private static class ConstantCollector extends 
QueryModelVisitorBase<RuntimeException> {
+
+        private Map<String, Integer> constantMap = Maps.newHashMap();
+        private int spCount = 0;
+        private int filterCount = 0;
+        
+        
+        @Override
+        public void meet(StatementPattern node) throws RuntimeException {
+            
+           spCount++;
+           super.meet(node);
+
+        }
+        
+        
+        @Override
+        public void meet(Filter node) throws RuntimeException {
+            
+           filterCount++;
+           super.meet(node);
+
+        }
+        
+        
+        
+        
+        @Override
+        public void meet(Var node) throws RuntimeException {
+            
+            if (node.isConstant()) {
+                String key = node.getValue().toString();
+                if(constantMap.containsKey(key)){
+                    int count = constantMap.get(key);
+                    count += 1;
+                    constantMap.put(key, count);
+                } else {
+                    constantMap.put(key, 1);
+                }
+            }
+
+        }
+        
+        
+        public void meet(ValueConstant node) throws RuntimeException {
+            
+            String key = node.getValue().toString();
+            
+            if(constantMap.containsKey(key)) {
+                int count = constantMap.get(key);
+                count += 1;
+                constantMap.put(key, count);
+            } else {
+                constantMap.put(key,1);
+            }
+            
+        }
+        
+
+        public Map<String, Integer> getConstantMap() {
+            return constantMap;
+        }
+        
+        public int getSpCount(){
+            return spCount;
+        }
+
+        
+        public int getFilterCount() {
+            return filterCount;
+        }
+        
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/92ddfa59/extras/indexing/src/main/java/mvm/rya/indexing/IndexingExpr.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/IndexingExpr.java 
b/extras/indexing/src/main/java/mvm/rya/indexing/IndexingExpr.java
new file mode 100644
index 0000000..553c3b8
--- /dev/null
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/IndexingExpr.java
@@ -0,0 +1,76 @@
+package mvm.rya.indexing;
+
+import java.util.Set;
+
+import mvm.rya.indexing.accumulo.geo.GeoTupleSet;
+
+import org.openrdf.model.URI;
+import org.openrdf.model.Value;
+import org.openrdf.query.algebra.StatementPattern;
+import org.openrdf.query.algebra.Var;
+
+import com.google.common.collect.Sets;
+
+public class IndexingExpr {
+
+    private final URI function;
+    private final Value[] arguments;
+    private final StatementPattern spConstraint;
+
+    public IndexingExpr(URI function, StatementPattern spConstraint, Value... 
arguments) {
+        this.function = function;
+        this.arguments = arguments;
+        this.spConstraint = spConstraint;
+    }
+
+    public URI getFunction() {
+        return function;
+    }
+
+    public Value[] getArguments() {
+        return arguments;
+    }
+
+    public StatementPattern getSpConstraint() {
+        return spConstraint;
+    }
+
+
+    public Set<String> getBindingNames() {
+        //resource and match variable for search are already included as 
standard result-bindings
+        Set<String> bindings = Sets.newHashSet();
+
+        for(Var v: spConstraint.getVarList()) {
+            if(!v.isConstant()) {
+                bindings.add(v.getName());
+            }
+        }
+        return bindings;
+    }
+    
+    
+    @Override
+    public boolean equals(Object other) {
+        if (!(other instanceof IndexingExpr)) {
+            return false;
+        }
+        IndexingExpr arg = (IndexingExpr) other;
+        return (this.function.equals(arg.function)) && 
(this.spConstraint.equals(arg.spConstraint))
+                && (this.arguments.equals(arg.arguments));
+    }
+    
+    
+    @Override
+    public int hashCode() {
+        int result = 17;
+        result = 31*result + function.hashCode();
+        result = 31*result + spConstraint.hashCode();
+        result = 31*result + arguments.hashCode();
+        
+        return result;
+    }
+    
+}
+
+    
+

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/92ddfa59/extras/indexing/src/main/java/mvm/rya/indexing/IndexingFunctionRegistry.java
----------------------------------------------------------------------
diff --git 
a/extras/indexing/src/main/java/mvm/rya/indexing/IndexingFunctionRegistry.java 
b/extras/indexing/src/main/java/mvm/rya/indexing/IndexingFunctionRegistry.java
new file mode 100644
index 0000000..69117fa
--- /dev/null
+++ 
b/extras/indexing/src/main/java/mvm/rya/indexing/IndexingFunctionRegistry.java
@@ -0,0 +1,117 @@
+package mvm.rya.indexing;
+
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+import mvm.rya.indexing.accumulo.geo.GeoConstants;
+
+import org.openrdf.model.URI;
+import org.openrdf.model.impl.URIImpl;
+import org.openrdf.query.algebra.ValueConstant;
+import org.openrdf.query.algebra.ValueExpr;
+import org.openrdf.query.algebra.Var;
+
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+
+public class IndexingFunctionRegistry {
+
+    
+    private static final Map<URI, FUNCTION_TYPE> SEARCH_FUNCTIONS = 
Maps.newHashMap();
+    
+    static {
+        
+        String TEMPORAL_NS = "tag:rya-rdf.org,2015:temporal#";         
+
+        SEARCH_FUNCTIONS.put(new 
URIImpl(TEMPORAL_NS+"after"),FUNCTION_TYPE.TEMPORAL);
+        SEARCH_FUNCTIONS.put(new URIImpl(TEMPORAL_NS+"before"), 
FUNCTION_TYPE.TEMPORAL);
+        SEARCH_FUNCTIONS.put(new URIImpl(TEMPORAL_NS+"equals"), 
FUNCTION_TYPE.TEMPORAL);
+        SEARCH_FUNCTIONS.put(new URIImpl(TEMPORAL_NS+"beforeInterval"), 
FUNCTION_TYPE.TEMPORAL);
+        SEARCH_FUNCTIONS.put(new URIImpl(TEMPORAL_NS+"afterInterval"), 
FUNCTION_TYPE.TEMPORAL);
+        SEARCH_FUNCTIONS.put(new URIImpl(TEMPORAL_NS+"insideInterval"), 
FUNCTION_TYPE.TEMPORAL);
+        SEARCH_FUNCTIONS.put(new URIImpl(TEMPORAL_NS+"hasBeginningInterval"), 
FUNCTION_TYPE.TEMPORAL);
+        SEARCH_FUNCTIONS.put(new URIImpl(TEMPORAL_NS+"hasEndInterval"), 
FUNCTION_TYPE.TEMPORAL);
+        
+        
+        SEARCH_FUNCTIONS.put(new URIImpl("http://rdf.useekm.com/fts#text";), 
FUNCTION_TYPE.FREETEXT);
+
+        SEARCH_FUNCTIONS.put(GeoConstants.GEO_SF_EQUALS, FUNCTION_TYPE.GEO);
+        SEARCH_FUNCTIONS.put(GeoConstants.GEO_SF_DISJOINT, FUNCTION_TYPE.GEO);
+        SEARCH_FUNCTIONS.put(GeoConstants.GEO_SF_INTERSECTS, 
FUNCTION_TYPE.GEO);
+        SEARCH_FUNCTIONS.put(GeoConstants.GEO_SF_TOUCHES, FUNCTION_TYPE.GEO);
+        SEARCH_FUNCTIONS.put(GeoConstants.GEO_SF_WITHIN, FUNCTION_TYPE.GEO);
+        SEARCH_FUNCTIONS.put(GeoConstants.GEO_SF_CONTAINS, FUNCTION_TYPE.GEO);
+        SEARCH_FUNCTIONS.put(GeoConstants.GEO_SF_OVERLAPS, FUNCTION_TYPE.GEO);
+        SEARCH_FUNCTIONS.put(GeoConstants.GEO_SF_CROSSES, FUNCTION_TYPE.GEO);
+
+    }
+    
+    public enum FUNCTION_TYPE {GEO, TEMPORAL, FREETEXT};
+    
+    
+    public static Set<URI> getFunctions() {
+        return SEARCH_FUNCTIONS.keySet();
+    }
+    
+    
+    public static Var getResultVarFromFunctionCall(URI function, 
List<ValueExpr> args) {
+        
+        FUNCTION_TYPE type = SEARCH_FUNCTIONS.get(function);
+        
+        switch(type) {
+        case GEO: 
+            return findBinaryResultVar(args);
+        case FREETEXT:
+            return findLiteralResultVar(args);
+        case TEMPORAL:
+            return findBinaryResultVar(args);
+        default:
+            return null;
+        }
+        
+    }
+    
+    
+    public static FUNCTION_TYPE getFunctionType(URI func) {
+        return SEARCH_FUNCTIONS.get(func);
+    }
+    
+    
+    
+    private static boolean isUnboundVariable(ValueExpr expr) {
+        return expr instanceof Var && !((Var)expr).hasValue();
+    }
+    
+    private static boolean isConstant(ValueExpr expr) {
+        return expr instanceof ValueConstant || (expr instanceof Var && 
((Var)expr).hasValue());
+    }
+    
+    
+    private static Var findBinaryResultVar(List<ValueExpr> args) {
+     
+        if (args.size() >= 2) {
+            ValueExpr arg1 = args.get(0);
+            ValueExpr arg2 = args.get(1);
+            if (isUnboundVariable(arg1) && isConstant(arg2))
+                return (Var) arg1;
+            else if (isUnboundVariable(arg2) && isConstant(arg1))
+                return (Var) arg2;
+        }
+        return null;
+    }
+    
+    
+    private static Var findLiteralResultVar(List<ValueExpr> args) {
+        if (args.size() >= 2) {
+            ValueExpr arg1 = args.get(0);
+            ValueExpr arg2 = args.get(1);
+            if (isUnboundVariable(arg1) && isConstant(arg2))
+                return (Var)arg1;
+        }
+        return null;
+    }
+    
+    
+    
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/92ddfa59/extras/indexing/src/main/java/mvm/rya/indexing/IteratorFactory.java
----------------------------------------------------------------------
diff --git 
a/extras/indexing/src/main/java/mvm/rya/indexing/IteratorFactory.java 
b/extras/indexing/src/main/java/mvm/rya/indexing/IteratorFactory.java
new file mode 100644
index 0000000..16b869d
--- /dev/null
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/IteratorFactory.java
@@ -0,0 +1,139 @@
+package mvm.rya.indexing;
+
+import info.aduna.iteration.CloseableIteration;
+
+import java.util.Collection;
+import java.util.Collections;
+import java.util.HashSet;
+import java.util.NoSuchElementException;
+import java.util.Set;
+
+import org.openrdf.model.Resource;
+import org.openrdf.model.Statement;
+import org.openrdf.model.URI;
+import org.openrdf.query.BindingSet;
+import org.openrdf.query.QueryEvaluationException;
+import org.openrdf.query.algebra.QueryModelNode;
+import org.openrdf.query.algebra.StatementPattern;
+import org.openrdf.query.algebra.Var;
+import org.openrdf.query.impl.MapBindingSet;
+
+
+//Given StatementPattern constraint and SearchFunction associated with an 
Indexing Node,
+//creates appropriate StatementConstraints object from StatementPattern 
constraint and
+//binding set and then uses SearchFunction to delegate query to appropriate 
index.
+//Resulting iterator over statements is then converted to an iterator over 
binding sets
+public class IteratorFactory {
+
+    public static CloseableIteration<BindingSet, QueryEvaluationException> 
getIterator(final StatementPattern match, 
+            final BindingSet bindings, final String queryText, final 
SearchFunction searchFunction) {
+        return new CloseableIteration<BindingSet, QueryEvaluationException>() {
+
+            private boolean isClosed = false;
+            private CloseableIteration<Statement, QueryEvaluationException> 
statementIt = null;
+
+            private String subjectBinding = match.getSubjectVar().getName();
+            private String predicateBinding = 
match.getPredicateVar().getName();
+            private String objectBinding = match.getObjectVar().getName();
+            private String contextBinding = null;
+
+            private void performQuery() throws QueryEvaluationException {
+
+                StatementContraints contraints = new StatementContraints();
+
+                // get the context (i.e. named graph) of the statement and use 
that in the query
+                QueryModelNode parentNode = 
match.getSubjectVar().getParentNode();
+                if (parentNode instanceof StatementPattern) {
+                    StatementPattern parentStatement = (StatementPattern) 
parentNode;
+                    Var contextVar = parentStatement.getContextVar();
+                    if (contextVar != null) {
+                        contextBinding = contextVar.getName();
+                        Resource context = (Resource) contextVar.getValue();
+                        contraints.setContext(context);
+                    }
+                }
+
+                // get the subject constraint
+                if (match.getSubjectVar().isConstant()) {
+                    // get the subject binding from the filter/statement pair
+                    Resource subject = (Resource) 
match.getSubjectVar().getValue();
+                    contraints.setSubject(subject);
+                } else if (bindings.hasBinding(subjectBinding)) {
+                    // get the subject binding from the passed in bindings (eg 
from other statements/parts of the tree)
+                    Resource subject = (Resource) 
bindings.getValue(subjectBinding);
+                    contraints.setSubject(subject);
+                }
+
+                // get the predicate constraint
+                if (match.getPredicateVar().isConstant()) {
+                    // get the predicate binding from the filter/statement pair
+                    Set<URI> predicates = new 
HashSet<URI>(getPredicateRestrictions(match.getPredicateVar()));
+                    contraints.setPredicates(predicates);
+                } else if (bindings.hasBinding(predicateBinding)) {
+                    // get the predicate binding from the passed in bindings 
(eg from other statements/parts of the tree)
+                    URI predicateUri = (URI) 
bindings.getValue(predicateBinding);
+                    Set<URI> predicates = Collections.singleton(predicateUri);
+                    contraints.setPredicates(predicates);
+                }
+
+                statementIt = searchFunction.performSearch(queryText, 
contraints);
+            }
+
+            @Override
+            public boolean hasNext() throws QueryEvaluationException {
+                if (statementIt == null) {
+                    performQuery();
+                }
+                return statementIt.hasNext();
+            }
+
+            @Override
+            public BindingSet next() throws QueryEvaluationException {
+                if (!hasNext() || isClosed) {
+                    throw new NoSuchElementException();
+                }
+
+                Statement statment = statementIt.next();
+
+                MapBindingSet bset = new MapBindingSet();
+                if (!subjectBinding.startsWith("-const"))
+                    bset.addBinding(subjectBinding, statment.getSubject());
+                if (!predicateBinding.startsWith("-const"))
+                    bset.addBinding(predicateBinding, statment.getPredicate());
+                if (!objectBinding.startsWith("-const"))
+                    bset.addBinding(objectBinding, statment.getObject());
+                if (contextBinding != null && 
!contextBinding.startsWith("-const"))
+                    bset.addBinding(contextBinding, statment.getContext());
+
+                // merge with other bindings.
+                for (String name : bindings.getBindingNames()) {
+                    bset.addBinding(name, bindings.getValue(name));
+                }
+
+                return bset;
+            }
+
+            @Override
+            public void remove() throws QueryEvaluationException {
+                throw new UnsupportedOperationException();
+
+            }
+
+            @Override
+            public void close() throws QueryEvaluationException {
+                if (statementIt != null) {
+                    statementIt.close();
+                }
+                isClosed = true;
+            }
+
+        };
+
+    }
+   
+    public static Collection<URI> getPredicateRestrictions(Var predicate) {
+        if (predicate.hasValue())
+            return Collections.singleton((URI) predicate.getValue());
+        return Collections.emptyList();
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/92ddfa59/extras/indexing/src/main/java/mvm/rya/indexing/KeyParts.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/KeyParts.java 
b/extras/indexing/src/main/java/mvm/rya/indexing/KeyParts.java
new file mode 100644
index 0000000..ca27c4a
--- /dev/null
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/KeyParts.java
@@ -0,0 +1,313 @@
+package mvm.rya.indexing;
+
+import java.util.Iterator;
+import java.util.LinkedList;
+import java.util.List;
+
+import mvm.rya.indexing.accumulo.Md5Hash;
+import mvm.rya.indexing.accumulo.StatementSerializer;
+
+import org.apache.accumulo.core.data.Value;
+import org.apache.commons.codec.binary.StringUtils;
+import org.apache.hadoop.io.Text;
+import org.openrdf.model.Resource;
+import org.openrdf.model.Statement;
+import org.openrdf.model.URI;
+import org.openrdf.model.impl.ContextStatementImpl;
+import org.openrdf.model.impl.StatementImpl;
+import org.openrdf.model.impl.URIImpl;
+
+import com.google.common.collect.Constraints;
+
+/**
+ * Store and format the various temporal index keys.
+ * Row Keys are in these two forms, where [x] denotes x is optional:
+ *             rowkey = contraintPrefix datetime
+ *             rowkey = datetime 0x/00 uniquesuffix
+ *             contraintPrefix = 0x/00 hash([subject][predicate])
+ *             uniquesuffix = some bytes to make it unique, like 
hash(statement).
+ * 
+ * The instance is in one of two modes depending on the constructor:
+ *             storage mode  -- construct with a triple statement, get an 
iterator of keys to store.  
+ *             query mode        -- construct with a statement and query 
constraints, get the key prefix to search.
+ * 
+ * this has the flavor of an immutable object
+ * This is independent of the underlying database engine
+ * 
+ * @author David.Lotts
+ *
+ */
public class KeyParts implements Iterable<KeyParts> {
	// Column-qualifier strategy names: which statement parts were hashed into
	// the row-key prefix (s=subject, p=predicate, o=object/datetime only).
	private static final String CQ_S_P_AT = "spo";
	private static final String CQ_P_AT = "po";
	private static final String CQ_S_AT = "so";
	private static final String CQ_O_AT = "o";
	public static final String CQ_BEGIN = "begin";
	public static final String CQ_END = "end";

	// Zero byte prepended to every hash so hashed prefixes are distinguishable in keys.
	public static final byte[] HASH_PREFIX = new byte[] {0};
	// The byte immediately following HASH_PREFIX; useful as a scan upper bound.
	public static final byte[] HASH_PREFIX_FOLLOWING = new byte[] {1};

	public final Text cf; // column family: the statement's context/named-graph
	public final Text cq; // column qualifier: one of the CQ_* strategy names
	public final Text constraintPrefix; // hash of subject and/or predicate (query mode)
	final Text storeKey; // complete row key to store (store mode)
	final private TemporalInstant instant;
	final private Statement statement;
	final private boolean queryMode; // true = query mode, false = store mode

	/**
	 * Query-mode constructor: holds a constraint prefix plus instant from which
	 * {@link #getQueryKey()} builds range-scan keys.
	 *
	 * @param constraintPrefix hashed subject/predicate prefix, or null for none
	 * @param instant the temporal instant to query at
	 * @param cf column family (context/named-graph, may be empty string)
	 * @param cq column qualifier (one of the CQ_* strategy names)
	 */
	KeyParts(Text constraintPrefix, TemporalInstant instant, String cf, String cq) {
		this.queryMode = true; // query mode
		this.storeKey = null;
		this.statement = null;
		this.constraintPrefix = constraintPrefix;
		this.instant = instant;
		this.cf = new Text(cf);
		this.cq = new Text(cq);
	}

	/**
	 * The value to store in the index: the full serialized statement.
	 * Only valid in store mode (requires a statement).
	 *
	 * @return the serialized statement as an Accumulo {@link Value}
	 */
	public Value getValue() {
		assert statement!=null;
		return new Value(StringUtils.getBytesUtf8(StatementSerializer.writeStatement(statement)));
	}

	/**
	 * Store-mode constructor: iterate this instance to obtain one KeyParts per
	 * indexing strategy for the given statement/instant pair.
	 *
	 * @param statement the statement being indexed
	 * @param instant2 the temporal instant extracted from the statement
	 */
	public KeyParts(Statement statement, TemporalInstant instant2) {
		this.queryMode = false; // store mode
		this.storeKey = null;
		this.constraintPrefix = null;
		this.statement = statement;
		this.instant = instant2;
		this.cf = null;
		this.cq = null;
	}

	/**
	 * Store-mode constructor used by {@link #iterator()}: wraps one fully built
	 * row key together with its column family/qualifier.
	 */
	private KeyParts(Text keyText, Text cf, Text cq, Statement statement) {
		this.queryMode = false; // store mode
		this.constraintPrefix = null;
		this.statement = statement;
		this.instant = null;
		this.storeKey = keyText;
		this.cf = cf;
		this.cq = cq;
	}

	/**
	 * Store mode only: iterates the four row keys to write for this statement,
	 * one per indexing strategy (object-only, subject+predicate, predicate, subject).
	 * Returns null in query mode (asserted against when assertions are enabled).
	 */
	@Override
	public Iterator<KeyParts> iterator() {
		final String[] strategies = new String[] {
				CQ_O_AT, CQ_S_P_AT, CQ_P_AT, CQ_S_AT
			} ;  // CQ_END?
		assert !queryMode : "iterator for queryMode is not immplemented" ;
		if (queryMode)
			return null;

		// if (!queryMode)
		return new Iterator<KeyParts>() {
			int nextStrategy = 0; // index into strategies[]; advanced by next()

			@Override
			public boolean hasNext() {
				return nextStrategy < strategies.length;
			}

			@Override
			public KeyParts next() {
				assert(statement!=null);
				Text keyText = new Text();
				// increment++ the next strategy AFTER getting the value
				switch (nextStrategy++) {
				case 0: // key = datetime + uniqueness; CQ marks object-only strategy
					assert (CQ_O_AT.equals(strategies[0]));
					keyText = new Text(instant.getAsKeyBytes());
					KeyParts.appendUniqueness(statement, keyText);
					return new KeyParts(keyText, new Text(StatementSerializer.writeContext(statement)), new Text(CQ_O_AT), statement);
				case 1: // key = hash(subject+predicate) + datetime
					assert (CQ_S_P_AT.equals(strategies[1]));
					KeyParts.appendSubjectPredicate(statement, keyText);
					KeyParts.appendInstant(instant, keyText);
					// appendUniqueness -- Not needed since it is already unique.
					return new KeyParts(keyText, new Text(StatementSerializer.writeContext(statement)), new Text(CQ_S_P_AT), statement);
				case 2: // key = hash(predicate) + datetime + uniqueness
					assert (CQ_P_AT.equals(strategies[2]));
					KeyParts.appendPredicate(statement, keyText);
					KeyParts.appendInstant(instant, keyText);
					KeyParts.appendUniqueness(statement, keyText);
					return new KeyParts(keyText, new Text(StatementSerializer.writeContext(statement)), new Text(CQ_P_AT), statement);
				case 3: // key = hash(subject) + datetime + uniqueness
					assert (CQ_S_AT.equals(strategies[3]));
					KeyParts.appendSubject(statement, keyText);
					KeyParts.appendInstant(instant, keyText);
					KeyParts.appendUniqueness(statement, keyText);
					return new KeyParts(keyText, new Text(StatementSerializer.writeContext(statement)), new Text(CQ_S_AT), statement);
				}
				throw new Error("Next passed end?  No such nextStrategy="+(nextStrategy-1));

			}

			@Override
			public void remove() {
				throw new Error("Remove not Implemented.");
			}
		};
	}

	/**
	 * Store mode only: the complete row key bytes built by {@link #iterator()}.
	 */
	public byte[] getStoreKey() {
		assert !queryMode : "must be in store Mode, store keys are not initialized.";
		return this.storeKey.copyBytes();
	}

	/**
	 * Query key is the prefix plus the datetime, but no uniqueness at the end.
	 * @return the row key for range queries.
	 */
	public Text getQueryKey() {
		return getQueryKey(this.instant);
	};

	/**
	 * Query key is the prefix plus the datetime, but no uniqueness at the end.
	 * Allows substituting a different instant (e.g. a range boundary) while
	 * reusing this instance's constraint prefix.
	 *
	 * @param theInstant the instant to encode after the constraint prefix
	 * @return the row key for range queries.
	 */
	public Text getQueryKey(TemporalInstant theInstant) {
		assert queryMode : "must be in query Mode, query keys are not initialized.";
		Text keyText = new Text();
		if (constraintPrefix != null)
			appendBytes(constraintPrefix.copyBytes(), keyText);
		appendInstant(theInstant, keyText);
		return keyText;
	};

	@Override
	public String toString() {
		return "KeyParts [contraintPrefix=" + toHumanString(constraintPrefix) + ", instant=" + toHumanString(instant.getAsKeyBytes()) + ", cf=" + cf + ", cq=" + cq + "]";
	}

	// Append 0x00 + md5(serialized subject) to the key.
	// NOTE(review): uses platform-default getBytes() here, unlike the UTF-8
	// helpers below — presumably equivalent in practice; confirm before changing.
	private static void appendSubject(Statement statement, Text keyText) {
		Value statementValue = new Value(StatementSerializer.writeSubject(statement).getBytes());
		byte[] hashOfValue = uniqueFromValueForKey(statementValue);
		appendBytes(HASH_PREFIX, keyText); // prefix the hash with a zero byte.
		appendBytes(hashOfValue, keyText);
	}

	// Append 0x00 + md5(serialized predicate, UTF-8) to the key.
	private static void appendPredicate(Statement statement, Text keyText) {
		Value statementValue = new Value(StringUtils.getBytesUtf8(StatementSerializer.writePredicate(statement)));
		byte[] hashOfValue = uniqueFromValueForKey(statementValue);
		appendBytes(HASH_PREFIX, keyText); // prefix the hash with a zero byte.
		appendBytes(hashOfValue, keyText);
	}

	// Append the instant's sortable key-byte encoding to the key.
	private static void appendInstant(TemporalInstant instant, Text keyText) {
		byte[] bytes = instant.getAsKeyBytes();
		appendBytes(bytes, keyText);
	}

	// Append 0x00 + md5(serialized subject+predicate, UTF-8) to the key.
	private static void appendSubjectPredicate(Statement statement, Text keyText) {
		Value statementValue = new Value(StringUtils.getBytesUtf8(StatementSerializer.writeSubjectPredicate(statement)));
		byte[] hashOfValue = uniqueFromValueForKey(statementValue);
		appendBytes(HASH_PREFIX, keyText); // prefix the hash with a zero byte.
		appendBytes(hashOfValue, keyText);
	}

	/**
	 * Append any byte array to a row key.
	 * @param bytes append this
	 * @param keyText text to append to
	 */
	private static void appendBytes(byte[] bytes, Text keyText) {
		keyText.append(bytes, 0, bytes.length);
	}

	/**
	 * Get a collision unlikely hash string and append to the key,
	 * so that if two keys have the same value, then they will be the same,
	 * if two different values that occur at the same time their keys are different.
	 * If the application uses a very large number of statements at the exact same time,
	 * the md5 value might be upgraded to for example sha-1 to avoid collisions.
	 * @param statement statement whose full serialization is hashed
	 * @param keyText key being built; receives a 0x00 delimiter then the md5 bytes
	 */
	public static void appendUniqueness(Statement statement, Text keyText) {
		keyText.append(HASH_PREFIX, 0, 1);   // delimiter
		Value statementValue = new Value(StringUtils.getBytesUtf8(StatementSerializer.writeStatement(statement)));
		byte[] hashOfValue = Md5Hash.md5Binary(statementValue);
		keyText.append(hashOfValue, 0, hashOfValue.length);
	}

	/**
	 * Get a collision unlikely hash string to append to the key,
	 * so that if two keys have the same value, then they will be the same,
	 * if two different values that occur at the same time their keys are different.
	 * @param value value to hash
	 * @return md5 digest bytes of the value
	 */
	private static byte[] uniqueFromValueForKey(Value value) {
		return Md5Hash.md5Binary(value);
	}

	/**
	 * List all the index keys to find for any query.  Set the strategy via the column qualifier, ex: CQ_S_P_AT.
	 * Column Family (CF) is the context/named-graph.
	 * Chooses the most selective strategy available: subject+predicate per
	 * predicate, else subject only, else object/datetime only.
	 * @param queryInstant the instant being queried
	 * @param contraints optional subject/predicate/context constraints
	 * @return one query-mode KeyParts per scan to perform
	 */
	static public List<KeyParts> keyPartsForQuery(TemporalInstant queryInstant, StatementContraints contraints) {
		List<KeyParts> keys = new LinkedList<KeyParts>();
		// placeholder URI for statement positions not constrained by the query
		URI urlNull = new URIImpl("urn:null");
		Resource currentContext = contraints.getContext();
		boolean hasSubj = contraints.hasSubject();
		if (contraints.hasPredicates()) {
			// one scan per predicate, hashed with the subject when present
			for (URI nextPredicate : contraints.getPredicates()) {
				Text contraintPrefix  = new Text();
				Statement statement = new ContextStatementImpl(hasSubj ? contraints.getSubject() : urlNull, nextPredicate, urlNull, contraints.getContext());
				if (hasSubj)
					appendSubjectPredicate(statement, contraintPrefix);
				else
					appendPredicate(statement, contraintPrefix);
				keys.add(new KeyParts(contraintPrefix, queryInstant, (currentContext==null)?"":currentContext.toString(), hasSubj?CQ_S_P_AT:CQ_P_AT  ));
			}
		}
		else if (contraints.hasSubject()) { // and no predicates
			Text contraintPrefix = new Text();
			Statement statement = new StatementImpl(contraints.getSubject(), urlNull, urlNull);
			appendSubject(statement, contraintPrefix);
			keys.add( new KeyParts(contraintPrefix, queryInstant, (currentContext==null)?"":currentContext.toString(), CQ_S_AT) );
		}
		else {
			// No constraints except possibly a context/named-graph, handled by the CF
			keys.add( new KeyParts(null, queryInstant, (currentContext==null)?"":currentContext.toString(), CQ_O_AT) );
		}
		return keys;
	}

	/**
	 * convert a non-utf8 byte[] and text and value to string and show unprintable bytes as {xx} where x is hex.
	 * @param value value whose bytes are rendered
	 * @return Human readable representation.
	 */
	public static String toHumanString(Value value) {
		return toHumanString(value==null?null:value.get());
	}

	/** Render a Hadoop Text's bytes human-readably; null-safe. */
	public static String toHumanString(Text text) {
		return toHumanString(text==null?null:text.copyBytes());
	}

	/**
	 * Render bytes human-readably: printable ASCII as-is, everything else as
	 * {xx} hex escapes; literal braces are escaped as {{ and }}-style tokens.
	 */
	public static String toHumanString(byte[] bytes) {
		if (bytes==null)
			return "{null}";
		StringBuilder sb = new StringBuilder();
		for (byte b : bytes) {
			if ((b > 0x7e) || (b < 32)) {
				sb.append("{");
				sb.append(Integer.toHexString( b & 0xff )); // Lop off the sign extended ones.
				sb.append("}");
			} else if (b == '{'||b == '}') { // Escape the literal braces.
				sb.append("{");
				sb.append((char)b);
				sb.append("}");
			} else
				sb.append((char)b);
		}
		return sb.toString();
	}

}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/92ddfa59/extras/indexing/src/main/java/mvm/rya/indexing/PrecompQueryIndexer.java
----------------------------------------------------------------------
diff --git 
a/extras/indexing/src/main/java/mvm/rya/indexing/PrecompQueryIndexer.java 
b/extras/indexing/src/main/java/mvm/rya/indexing/PrecompQueryIndexer.java
new file mode 100644
index 0000000..9f7ef86
--- /dev/null
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/PrecompQueryIndexer.java
@@ -0,0 +1,44 @@
+package mvm.rya.indexing;
+
+import info.aduna.iteration.CloseableIteration;
+
+import java.io.Closeable;
+import java.io.Flushable;
+import java.io.IOException;
+import java.util.Collection;
+import java.util.List;
+import java.util.Map;
+
+import mvm.rya.indexing.external.tupleSet.AccumuloIndexSet.AccValueFactory;
+
+import org.apache.accumulo.core.client.BatchScanner;
+import org.apache.accumulo.core.client.TableNotFoundException;
+import org.openrdf.model.Value;
+import org.openrdf.query.BindingSet;
+import org.openrdf.query.QueryEvaluationException;
+
+
+
+public interface PrecompQueryIndexer extends Closeable, Flushable {
+
+    
+    public abstract void storeBindingSet(BindingSet bs) throws IOException ;
+
+    public abstract void storeBindingSets(Collection<BindingSet> bindingSets)
+            throws IOException, IllegalArgumentException;
+
+
+    public abstract CloseableIteration<BindingSet, QueryEvaluationException> 
queryPrecompJoin(List<String> varOrder, 
+            String localityGroup, Map<String, AccValueFactory> bindings, 
Map<String,Value> valMap, 
+            Collection<BindingSet> constraints) throws 
QueryEvaluationException,TableNotFoundException;
+
+   
+
+    @Override
+    public abstract void flush() throws IOException;
+
+    @Override
+    public abstract void close() throws IOException;
+}
+
+ 
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/92ddfa59/extras/indexing/src/main/java/mvm/rya/indexing/RyaSailFactory.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/RyaSailFactory.java 
b/extras/indexing/src/main/java/mvm/rya/indexing/RyaSailFactory.java
new file mode 100644
index 0000000..5d9615f
--- /dev/null
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/RyaSailFactory.java
@@ -0,0 +1,64 @@
+package mvm.rya.indexing;
+
+import mvm.rya.accumulo.AccumuloRdfConfiguration;
+import mvm.rya.accumulo.AccumuloRyaDAO;
+import mvm.rya.api.RdfCloudTripleStoreConfiguration;
+import mvm.rya.api.persist.RyaDAOException;
+import mvm.rya.indexing.accumulo.ConfigUtils;
+import mvm.rya.mongodb.MongoDBRdfConfiguration;
+import mvm.rya.mongodb.MongoDBRyaDAO;
+import mvm.rya.rdftriplestore.RdfCloudTripleStore;
+
+import org.apache.accumulo.core.client.AccumuloException;
+import org.apache.accumulo.core.client.AccumuloSecurityException;
+import org.apache.accumulo.core.client.Connector;
+import org.apache.hadoop.conf.Configuration;
+import org.openrdf.sail.Sail;
+
+public class RyaSailFactory {
+
+    
+    
+    public static Sail getInstance(Configuration conf) throws 
AccumuloException, 
+    AccumuloSecurityException, RyaDAOException {
+        
+        return getRyaSail(conf);
+    }
+    
+   
+    
+    private static Sail getRyaSail(Configuration config) throws 
AccumuloException, AccumuloSecurityException, RyaDAOException {
+        
+        RdfCloudTripleStore store = new RdfCloudTripleStore();
+        if (ConfigUtils.getUseMongo(config)) {
+            MongoDBRdfConfiguration conf = new MongoDBRdfConfiguration(config);
+            
conf.setTablePrefix(config.get(RdfCloudTripleStoreConfiguration.CONF_TBL_PREFIX));
+            ConfigUtils.setIndexers(conf);
+            
+            MongoDBRyaDAO crdfdao = new MongoDBRyaDAO(conf);
+            crdfdao.init();
+            
+            conf.setDisplayQueryPlan(true);
+            store.setRyaDAO(crdfdao);
+        } else {
+            Connector connector = ConfigUtils.getConnector(config);
+            AccumuloRyaDAO crdfdao = new AccumuloRyaDAO();
+            crdfdao.setConnector(connector);
+
+            AccumuloRdfConfiguration conf = new 
AccumuloRdfConfiguration(config);
+            
conf.setTablePrefix(config.get(RdfCloudTripleStoreConfiguration.CONF_TBL_PREFIX));
 // sets
+                                                                               
                // TablePrefixLayoutStrategy
+            ConfigUtils.setIndexers(conf);
+            conf.setDisplayQueryPlan(true);
+
+            crdfdao.setConf(conf);
+            crdfdao.init();
+            store.setRyaDAO(crdfdao);
+        }
+
+        return store;
+    }
+
+    
+    
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/92ddfa59/extras/indexing/src/main/java/mvm/rya/indexing/SearchFunction.java
----------------------------------------------------------------------
diff --git a/extras/indexing/src/main/java/mvm/rya/indexing/SearchFunction.java 
b/extras/indexing/src/main/java/mvm/rya/indexing/SearchFunction.java
new file mode 100644
index 0000000..c2cbbfc
--- /dev/null
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/SearchFunction.java
@@ -0,0 +1,25 @@
+package mvm.rya.indexing;
+
+import info.aduna.iteration.CloseableIteration;
+import org.openrdf.model.Statement;
+import org.openrdf.query.QueryEvaluationException;
+
+/**
+ * A function used to perform a search.
+ */
+public interface SearchFunction {
+
+    /**
+     * Search the indices for the given terms and return {@link Statement}s 
that meet the {@link StatementContraints}
+     * 
+     * @param searchTerms
+     *            the search terms
+     * @param contraints
+     *            the constraints on the returned {@link Statement}s
+     * @return
+     * @throws QueryEvaluationException
+     */
+    public abstract CloseableIteration<Statement, QueryEvaluationException> 
performSearch(String searchTerms, StatementContraints contraints)
+            throws QueryEvaluationException;
+
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/92ddfa59/extras/indexing/src/main/java/mvm/rya/indexing/SearchFunctionFactory.java
----------------------------------------------------------------------
diff --git 
a/extras/indexing/src/main/java/mvm/rya/indexing/SearchFunctionFactory.java 
b/extras/indexing/src/main/java/mvm/rya/indexing/SearchFunctionFactory.java
new file mode 100644
index 0000000..dad4e35
--- /dev/null
+++ b/extras/indexing/src/main/java/mvm/rya/indexing/SearchFunctionFactory.java
@@ -0,0 +1,51 @@
+package mvm.rya.indexing;
+
+import java.util.Map;
+
+import org.apache.log4j.Logger;
+import org.openrdf.model.URI;
+import org.openrdf.query.QueryEvaluationException;
+
+import com.google.common.collect.Maps;
+
+public abstract class SearchFunctionFactory {
+    
+    private static final Logger logger = 
Logger.getLogger(SearchFunctionFactory.class);
+
+    private final Map<URI, SearchFunction> SEARCH_FUNCTION_MAP = 
Maps.newHashMap();
+
+
+    /**
+     * Get a {@link GeoSearchFunction} for a give URI.
+     * 
+     * @param searchFunction
+     * @return
+     */
+    public SearchFunction getSearchFunction(final URI searchFunction) {
+
+        SearchFunction geoFunc = null;
+
+        try {
+            geoFunc = getSearchFunctionInternal(searchFunction);
+        } catch (QueryEvaluationException e) {
+            e.printStackTrace();
+        }
+
+        return geoFunc;
+    }
+
+    private SearchFunction getSearchFunctionInternal(final URI searchFunction) 
throws QueryEvaluationException {
+        SearchFunction sf = SEARCH_FUNCTION_MAP.get(searchFunction);
+
+        if (sf != null) {
+            return sf;
+        } else {
+            throw new QueryEvaluationException("Unknown Search Function: " + 
searchFunction.stringValue());
+        }
+            
+       
+    }
+
+   
+}
+  
\ No newline at end of file


Reply via email to