http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/9f611019/extras/rya.forwardchain/src/main/java/org/apache/rya/forwardchain/strategy/AbstractRuleExecutionStrategy.java
----------------------------------------------------------------------
diff --git 
a/extras/rya.forwardchain/src/main/java/org/apache/rya/forwardchain/strategy/AbstractRuleExecutionStrategy.java
 
b/extras/rya.forwardchain/src/main/java/org/apache/rya/forwardchain/strategy/AbstractRuleExecutionStrategy.java
new file mode 100644
index 0000000..24c8de9
--- /dev/null
+++ 
b/extras/rya.forwardchain/src/main/java/org/apache/rya/forwardchain/strategy/AbstractRuleExecutionStrategy.java
@@ -0,0 +1,108 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.rya.forwardchain.strategy;
+
+import org.apache.rya.api.domain.StatementMetadata;
+import org.apache.rya.forwardchain.ForwardChainException;
+
+import org.apache.rya.forwardchain.rule.AbstractConstructRule;
+import org.apache.rya.forwardchain.rule.AbstractInconsistencyRule;
+import org.apache.rya.forwardchain.rule.AbstractUpdateRule;
+
+/**
+ * Base class for rule application strategies, which can execute a single
+ * forward chaining (materialization) rule at a time. Subclasses may provide
+ * implementations of methods to execute whichever they support of construct
+ * rules, update rules, and inconsistency rules. The default behavior for all
+ * kinds is to throw an {@link UnsupportedOperationException}.
+ */
+public abstract class AbstractRuleExecutionStrategy {
+    protected int requiredLevel = 0;
+
+    /**
+     * Execute a rule corresponding to a "CONSTRUCT" query. Throws an
+     * UnsupportedOperationException if not explicitly overridden.
+     * @param rule The construct rule to apply; assumed not null.
+     * @param metadata Additional metadata to add to any inferred triples;
+     *  assumed not null.
+     * @return The number of inferred triples. Higher-level forward chaining
+     *  strategies may rely on the accuracy of this number.
+     * @throws ForwardChainException if execution failed.
+     */
+    public long executeConstructRule(AbstractConstructRule rule,
+            StatementMetadata metadata) throws ForwardChainException {
+        throw new UnsupportedOperationException("Rule execution strategy does 
not support construct rules.");
+    };
+
+    /**
+     * Execute a rule corresponding to an update query. Throws an
+     * UnsupportedOperationException if not explicitly overridden.
+     * @param rule The update rule to apply; assumed not null.
+     * @param metadata Additional metadata to add to any updated triples;
+     *  assumed not null.
+     * @return The number of inferences made. Higher-level forward chaining
+     *  strategies may rely on the accuracy of this number.
+     * @throws ForwardChainException if execution failed.
+     */
+    public long executeUpdateRule(AbstractUpdateRule rule,
+            StatementMetadata metadata) throws ForwardChainException {
+        throw new UnsupportedOperationException("Rule execution strategy does 
not support update rules.");
+    };
+
+    /**
+     * Execute a rule capable of detecting inconsistencies. Throws an
+     * UnsupportedOperationException if not explicitly overridden.
+     * @param rule The inconsistency rule to apply; assumed not null.
+     * @param metadata Additional metadata associated with inconsistencies;
+     *  assumed not null.
+     * @return The number of inconsistencies found.
+     * @throws ForwardChainException if execution failed.
+     */
+    public long executeInconsistencyRule(AbstractInconsistencyRule rule,
+            StatementMetadata metadata) throws ForwardChainException {
+        throw new UnsupportedOperationException("Rule execution strategy does 
not perform inconsistency detection.");
+    }
+
+    /**
+     * Initialize the strategy and make any preparations for executing rules.
+     * Does nothing by default; subclasses should override if necessary.
+     * @throws ForwardChainException
+     */
+    public void initialize() throws ForwardChainException { };
+
+    /**
+     * Shut down the strategy and perform any appropriate cleanup. Does nothing
+     * by default; subclasses should override if necessary.
+     * @throws ForwardChainException
+     */
+    public void shutDown() throws ForwardChainException { }
+
+    /**
+     * Indicate that a rule need only be applied if one of the source 
statements
+     * is is at least this derivation level, i.e. took this many steps to 
derive
+     * itself. Subclasses may use this for optimization, but are not guaranteed
+     * to.
+     * @param derivationLevel Forward chaining level of statements that should
+     *  be used to trigger rules. If not set, defaults to zero which should 
have
+     *  no effect.
+     */
+    public void setRequiredLevel(int derivationLevel) {
+        this.requiredLevel = derivationLevel;
+    };
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/9f611019/extras/rya.forwardchain/src/main/java/org/apache/rya/forwardchain/strategy/MongoPipelineStrategy.java
----------------------------------------------------------------------
diff --git 
a/extras/rya.forwardchain/src/main/java/org/apache/rya/forwardchain/strategy/MongoPipelineStrategy.java
 
b/extras/rya.forwardchain/src/main/java/org/apache/rya/forwardchain/strategy/MongoPipelineStrategy.java
new file mode 100644
index 0000000..c095122
--- /dev/null
+++ 
b/extras/rya.forwardchain/src/main/java/org/apache/rya/forwardchain/strategy/MongoPipelineStrategy.java
@@ -0,0 +1,276 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.rya.forwardchain.strategy;
+
+import java.util.List;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.atomic.LongAdder;
+
+import org.apache.log4j.Logger;
+import org.apache.rya.api.domain.RyaStatement;
+import org.apache.rya.api.domain.StatementMetadata;
+import org.apache.rya.api.persist.RyaDAOException;
+import org.apache.rya.api.persist.query.RyaQuery;
+import org.apache.rya.api.persist.query.RyaQueryEngine;
+import org.apache.rya.forwardchain.ForwardChainException;
+import org.apache.rya.forwardchain.rule.AbstractConstructRule;
+import org.apache.rya.forwardchain.rule.Rule;
+import org.apache.rya.mongodb.MongoDBRdfConfiguration;
+import org.apache.rya.mongodb.MongoDBRyaDAO;
+import org.apache.rya.mongodb.StatefulMongoDBRdfConfiguration;
+import org.apache.rya.mongodb.aggregation.AggregationPipelineQueryNode;
+import org.apache.rya.mongodb.aggregation.SparqlToPipelineTransformVisitor;
+import org.apache.rya.mongodb.batch.MongoDbBatchWriter;
+import org.apache.rya.mongodb.batch.MongoDbBatchWriterConfig;
+import org.apache.rya.mongodb.batch.MongoDbBatchWriterException;
+import org.apache.rya.mongodb.batch.MongoDbBatchWriterUtils;
+import org.apache.rya.mongodb.batch.collection.CollectionType;
+import org.apache.rya.mongodb.batch.collection.MongoCollectionType;
+import org.apache.rya.mongodb.dao.SimpleMongoDBStorageStrategy;
+import org.apache.rya.sail.config.RyaSailFactory;
+import org.bson.Document;
+import org.bson.conversions.Bson;
+import org.openrdf.query.algebra.QueryRoot;
+import org.openrdf.query.algebra.TupleExpr;
+
+import com.google.common.base.Preconditions;
+import com.mongodb.Block;
+import com.mongodb.DBObject;
+import com.mongodb.MongoClient;
+import com.mongodb.client.MongoCollection;
+import com.mongodb.client.MongoDatabase;
+import com.mongodb.util.JSON;
+
+/**
+ * A rule execution strategy for MongoDB Rya that converts a single rule into 
an
+ * aggregation pipeline whenever possible. Falls back on an internal
+ * {@link SailExecutionStrategy} to handle any rules that can't be converted.
+ */
public class MongoPipelineStrategy extends AbstractRuleExecutionStrategy {
    private static final Logger logger = Logger.getLogger(MongoPipelineStrategy.class);

    // Cursor batch size used when iterating over aggregation results.
    private static final int PIPELINE_BATCH_SIZE = 1000;

    // Converts SPARQL tuple expressions into Mongo aggregation pipeline nodes.
    private final SparqlToPipelineTransformVisitor pipelineVisitor;
    // The triples collection the pipelines read from and new triples are written to.
    private final MongoCollection<Document> baseCollection;
    private final MongoDbBatchWriter<Document> batchWriter;
    private final MongoDBRyaDAO dao;
    private final SimpleMongoDBStorageStrategy storageStrategy = new SimpleMongoDBStorageStrategy();
    // Most recent execution timestamp recorded per rule; later executions of
    // the same rule only need to consider triples newer than this.
    private final ConcurrentHashMap<Rule, Long> executionTimes = new ConcurrentHashMap<>();
    // Fallback strategy for rules that can't be converted to a pipeline.
    private final AbstractRuleExecutionStrategy backup;
    private final RyaQueryEngine<StatefulMongoDBRdfConfiguration> engine;
    // Once the fallback has been used, derivation levels on stored triples are
    // no longer guaranteed, so level-based pruning must be disabled.
    private boolean usedBackup = false;

    /**
     * Initialize based on a configuration.
     * @param mongoConf Should contain database information; cannot be null. If
     *      passed a stateful configuration, uses the existing mongo client,
     *      otherwise creates one.
     * @throws ForwardChainException if the DAO can't be initialized or the
     *      batch writer can't be started.
     */
    public MongoPipelineStrategy(MongoDBRdfConfiguration mongoConf) throws ForwardChainException {
        Preconditions.checkNotNull(mongoConf);
        final String mongoDBName = mongoConf.getMongoDBName();
        final String collectionName = mongoConf.getTriplesCollectionName();
        // Disable flushing; writes go through the batch writer instead.
        mongoConf.setFlush(false);
        final StatefulMongoDBRdfConfiguration statefulConf;
        try {
            if (mongoConf instanceof StatefulMongoDBRdfConfiguration) {
                // Reuse the existing mongo client held by the stateful config.
                statefulConf = (StatefulMongoDBRdfConfiguration) mongoConf;
                this.dao = new MongoDBRyaDAO();
                this.dao.setConf(statefulConf);
                this.dao.init();
            }
            else {
                // Let the factory create a client and a stateful config.
                this.dao = RyaSailFactory.getMongoDAO(mongoConf);
                statefulConf = this.dao.getConf();
            }
        } catch (RyaDAOException e) {
            throw new ForwardChainException("Can't connect to Rya.", e);
        }
        final MongoClient mongoClient = statefulConf.getMongoClient();
        final MongoDatabase mongoDB = mongoClient.getDatabase(mongoDBName);
        this.baseCollection = mongoDB.getCollection(collectionName);
        this.pipelineVisitor = new SparqlToPipelineTransformVisitor(this.baseCollection);
        this.engine = this.dao.getQueryEngine();
        this.backup = new SailExecutionStrategy(statefulConf);
        final MongoDbBatchWriterConfig writerConfig = MongoDbBatchWriterUtils.getMongoDbBatchWriterConfig(statefulConf);
        final CollectionType<Document> ct = new MongoCollectionType(baseCollection);
        this.batchWriter = new MongoDbBatchWriter<>(ct, writerConfig);
        try {
            this.batchWriter.start();
        } catch (final MongoDbBatchWriterException e) {
            throw new ForwardChainException("Error starting MongoDB batch writer", e);
        }
    }

    /**
     * Execute a CONSTRUCT rule by converting it into a pipeline, iterating
     * through the resulting documents, and inserting them back to the data
     * store as new triples. If pipeline conversion fails, falls back on
     * default execution strategy.
     * @param rule A construct query rule; not null.
     * @param metadata StatementMetadata to attach to new triples; not null.
     * @return The number of new triples inferred.
     * @throws ForwardChainException if execution fails.
     */
    @Override
    public long executeConstructRule(AbstractConstructRule rule,
            StatementMetadata metadata) throws ForwardChainException {
        Preconditions.checkNotNull(rule);
        logger.info("Applying inference rule " + rule + "...");
        long timestamp = System.currentTimeMillis();
        // Get a pipeline that turns individual matches into triples
        List<Bson> pipeline = null;
        try {
            int requireSourceLevel = 0;
            if (!usedBackup) {
                // If we can assume derivation levels are set properly, we can optimize by
                // pruning any derived fact whose sources are all old information. (i.e. we can
                // infer that the pruned fact would have already been derived in a previous
                // step.) But if the backup strategy has ever been used, the source triples aren't
                // guaranteed to have derivation level set.
                requireSourceLevel = requiredLevel;
            }
            pipeline = toPipeline(rule, requireSourceLevel, timestamp);
        }
        catch (ForwardChainException e) {
            // Conversion failure is non-fatal: fall through to the backup below.
            logger.error(e);
        }
        if (pipeline == null) {
            if (backup == null) {
                logger.error("Couldn't convert " + rule + " to pipeline:");
                for (String line : rule.getQuery().toString().split("\n")) {
                    logger.error("\t" + line);
                }
                throw new UnsupportedOperationException("Couldn't convert query to pipeline.");
            }
            else {
                logger.debug("Couldn't convert " + rule + " to pipeline:");
                for (String line : rule.getQuery().toString().split("\n")) {
                    logger.debug("\t" + line);
                }
                logger.debug("Using fallback strategy.");
                // Remember that levels can no longer be trusted (see usedBackup).
                usedBackup = true;
                return backup.executeConstructRule(rule, metadata);
            }
        }
        // Execute the pipeline
        for (Bson step : pipeline) {
            logger.debug("\t" + step.toString());
        }
        // LongAdder: the count is incremented from the forEach callback.
        LongAdder count = new LongAdder();
        baseCollection.aggregate(pipeline)
            .allowDiskUse(true)
            .batchSize(PIPELINE_BATCH_SIZE)
            .forEach(new Block<Document>() {
                @Override
                public void apply(Document doc) {
                    // Round-trip through JSON/DBObject to reuse the storage
                    // strategy's deserialization logic.
                    final DBObject dbo = (DBObject) JSON.parse(doc.toJson());
                    RyaStatement rstmt = storageStrategy.deserializeDBObject(dbo);
                    // Only insert triples not already present in the store.
                    if (!statementExists(rstmt)) {
                        count.increment();
                        doc.replace(SimpleMongoDBStorageStrategy.STATEMENT_METADATA, metadata.toString());
                        try {
                            batchWriter.addObjectToQueue(doc);
                        } catch (MongoDbBatchWriterException e) {
                            // Best-effort insert: log and continue with the rest.
                            logger.error("Couldn't insert " + rstmt, e);
                        }
                    }
                }
            });
        try {
            batchWriter.flush();
        } catch (MongoDbBatchWriterException e) {
            throw new ForwardChainException("Error writing to Mongo", e);
        }
        logger.info("Added " + count + " new statements.");
        // Record the start time of this execution, keeping the latest value if
        // another thread recorded a newer one concurrently.
        executionTimes.compute(rule, (r, previous) -> {
            if (previous != null && previous > timestamp) {
                return previous;
            }
            else {
                return timestamp;
            }
        });
        return count.longValue();
    }

    // Returns true if an identical statement is already stored. On query error,
    // returns false so the statement is (re)inserted rather than dropped.
    // NOTE(review): the iterable returned by engine.query is never closed here;
    // if it is a CloseableIterable this may leak a cursor -- confirm.
    private boolean statementExists(RyaStatement rstmt) {
        try {
            return engine.query(new RyaQuery(rstmt)).iterator().hasNext();
        } catch (RyaDAOException e) {
            logger.error("Error querying for " + rstmt, e);
            return false;
        }
    }

    /**
     * Flush and close the batch writer, and shut down the backup
     * SailExecutionStrategy.
     * @throws ForwardChainException if the batch writer or backup strategy
     *  throw any errors.
     */
    @Override
    public void shutDown() throws ForwardChainException {
        backup.shutDown();
        try {
            batchWriter.shutdown();
        } catch (MongoDbBatchWriterException e) {
            throw new ForwardChainException("Error shutting down batch writer", e);
        }
    }

    /**
     * Converts a construct rule into a series of documents representing
     * aggregation pipeline steps.
     * @param rule A construct query rule.
     * @param sourceLevel Only make derivations whose source triples have this
     *  derivation level or higher, i.e. took some number of forward chaining
     *  steps to infer. Set to zero to skip this check.
     * @param timestamp Timestamp to be set for all inferred triples.
     * @return An aggregation pipeline, or null if the rule's query could not
     *  be converted to a pipeline.
     * @throws ForwardChainException if pipeline construction fails.
     */
    private List<Bson> toPipeline(AbstractConstructRule rule, int sourceLevel,
            long timestamp) throws ForwardChainException {
        TupleExpr tupleExpr = rule.getQuery().getTupleExpr();
        // The visitor replaces the root's child, so ensure there is a root.
        if (!(tupleExpr instanceof QueryRoot)) {
            tupleExpr = new QueryRoot(tupleExpr);
        }
        try {
            tupleExpr.visit(pipelineVisitor);
        } catch (Exception e) {
            throw new ForwardChainException("Error converting construct rule to an aggregation pipeline", e);
        }
        // Conversion succeeded only if the whole query collapsed to a single
        // AggregationPipelineQueryNode under the root.
        if (tupleExpr instanceof QueryRoot) {
            QueryRoot root = (QueryRoot) tupleExpr;
            if (root.getArg() instanceof AggregationPipelineQueryNode) {
                AggregationPipelineQueryNode pipelineNode = (AggregationPipelineQueryNode) root.getArg();
                pipelineNode.distinct(); // require distinct triples
                pipelineNode.requireSourceDerivationDepth(sourceLevel);
                // Only consider triples added since this rule's last execution.
                long latestTime = executionTimes.getOrDefault(rule, 0L);
                if (latestTime > 0) {
                    pipelineNode.requireSourceTimestamp(latestTime);
                }
                return pipelineNode.getTriplePipeline(timestamp, false);
            }
        }
        return null;
    }
}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/9f611019/extras/rya.forwardchain/src/main/java/org/apache/rya/forwardchain/strategy/RoundRobinStrategy.java
----------------------------------------------------------------------
diff --git 
a/extras/rya.forwardchain/src/main/java/org/apache/rya/forwardchain/strategy/RoundRobinStrategy.java
 
b/extras/rya.forwardchain/src/main/java/org/apache/rya/forwardchain/strategy/RoundRobinStrategy.java
new file mode 100644
index 0000000..eb044fc
--- /dev/null
+++ 
b/extras/rya.forwardchain/src/main/java/org/apache/rya/forwardchain/strategy/RoundRobinStrategy.java
@@ -0,0 +1,212 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.rya.forwardchain.strategy;
+
+import java.util.Comparator;
+import java.util.HashSet;
+import java.util.Set;
+import java.util.SortedSet;
+import java.util.TreeSet;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import org.apache.log4j.Logger;
+import org.apache.rya.api.domain.RyaType;
+import org.apache.rya.api.domain.StatementMetadata;
+import org.apache.rya.forwardchain.ForwardChainConstants;
+import org.apache.rya.forwardchain.ForwardChainException;
+import org.apache.rya.forwardchain.rule.Rule;
+import org.apache.rya.forwardchain.rule.Ruleset;
+import org.openrdf.model.vocabulary.XMLSchema;
+
+import com.google.common.base.Preconditions;
+
+/**
+ * A simple {@link AbstractForwardChainStrategy} that iterates over every
+ * relevant rule once, and repeats until no rules are relevant.
+ * <p>
+ * Initially, all rules are considered relevant. Iteration 1 executes each rule
+ * once.
+ * <p>
+ * When a rule produces inferences, all rules that are marked as that rule's
+ * successors according to the {@link Ruleset} are triggered as potentially
+ * relevant for future execution. If a triggered rule is scheduled to be
+ * executed during the current iteration, nothing changes. If a triggered rule
+ * has already been executed once during the current iteration, or was not
+ * activated for the current iteration at all, it is flagged to be executed
+ * during the next iteration.
+ * <p>
+ * When an iteration concludes, a new iteration begins with the relevant set of
+ * rules having been determined during the previous iteration. If there are no
+ * such rules, forward chaining ends.
+ * <p>
+ * Within each iteration, rules are processed such that a rule which may 
trigger
+ * many other rules is given priority over a rule that may be triggered by many
+ * other rules.
+ * <p>
+ * The observation that one rule may trigger another is based on the
+ * relationships between triple patterns produced and consumed by the rules in
+ * general, not based on any triples that were actually generated. Therefore,
+ * there may be false positives but not false negatives: Rules triggered by the
+ * current rule may or may not produce more triples in response, but any rule
+ * that could produce triples in response will be triggered.
+ * <p>
+ * The procedure for executing the individual rules is governed by the
+ * {@link RuleExecutionStrategy}. This class uses the strategy's reported 
counts
+ * to determine whether or not a rule has produced inferences.
+ */
public class RoundRobinStrategy extends AbstractForwardChainStrategy {
    private static final Logger logger = Logger.getLogger(RoundRobinStrategy.class);

    // Executes individual rules; its reported counts drive rule triggering.
    private final AbstractRuleExecutionStrategy ruleStrategy;
    // Current iteration number; zero until the first iteration begins.
    private int iteration;
    private Ruleset ruleset;
    // Rules still to be executed in the current iteration.
    private Set<Rule> activeNow;
    // Rules triggered for the next iteration.
    private Set<Rule> activeNextIteration;
    private long inferencesThisIteration;
    // True while forward chaining is in progress (between initialize and done).
    private AtomicBoolean initialized = new AtomicBoolean(false);

    /**
     * Instantiate a RoundRobinStrategy by providing the RuleExecutionStrategy.
     * @param ruleStrategy Defines how to execute individual rules; not null.
     */
    public RoundRobinStrategy(AbstractRuleExecutionStrategy ruleStrategy) {
        Preconditions.checkNotNull(ruleStrategy);
        this.ruleStrategy = ruleStrategy;
    }

    /**
     * Prepare for forward chaining: all rules in the ruleset start out
     * scheduled for the first iteration.
     * @param withRuleset The rules to apply; not null.
     * @throws ForwardChainException if preparing the first iteration fails.
     */
    @Override
    public void initialize(Ruleset withRuleset) throws ForwardChainException {
        Preconditions.checkNotNull(withRuleset);
        iteration = 0;
        ruleset = withRuleset;
        activeNow = new HashSet<>();
        // Every rule is considered relevant for the first iteration.
        activeNextIteration = new HashSet<>(ruleset.getRules());
        logger.info("Initializing round robin forward chaining, with " +
                activeNextIteration.size() + " rules.");
        initialized.set(true);
        prepareQueue();
    }

    // If the current iteration is exhausted, either start the next iteration
    // (moving the triggered rules into the active set) or finish entirely.
    private void prepareQueue() throws ForwardChainException {
        if (initialized.get()) {
            if (activeNow.isEmpty()) {
                if (iteration > 0) {
                    logger.info("Finished iteration " + iteration + "; made " +
                            inferencesThisIteration + " inferences.");
                }
                if (activeNextIteration.isEmpty()) {
                    // No rules were triggered: forward chaining is complete.
                    logger.info("Finished forward chaining after " + iteration + " iterations.");
                    setDone();
                }
                else {
                    // Statements derived in iteration N have level N; require
                    // at least that level so old facts don't re-trigger rules.
                    ruleStrategy.setRequiredLevel(iteration);
                    iteration++;
                    inferencesThisIteration = 0;
                    activeNow.addAll(activeNextIteration);
                    activeNextIteration.clear();
                    logger.info("Beginning iteration " + iteration + ", with " +
                            activeNow.size() + " rules to execute...");
                }
            }
        }
    }

    // Mark forward chaining as finished and release the rule strategy.
    private void setDone() throws ForwardChainException {
        initialized.set(false);
        if (ruleStrategy != null) {
            ruleStrategy.shutDown();
        }
    }

    @Override
    public boolean isActive() {
        return initialized.get();
    }

    /**
     * Execute the next scheduled rule, trigger its successors if it made any
     * inferences, and advance the iteration if necessary.
     * @return The number of inferences the executed rule made; zero if no
     *  rule was available to execute.
     * @throws ForwardChainException if rule execution or cleanup fails.
     */
    @Override
    public long executeNext() throws ForwainException {
        if (!initialized.get()) {
            return 0;
        }
        Rule rule = getNextRule();
        if (rule == null) {
            return 0;
        }
        // Tag each inferred statement with the iteration that derived it.
        StatementMetadata metadata = new StatementMetadata();
        metadata.addMetadata(ForwardChainConstants.RYA_DERIVATION_TIME,
                new RyaType(XMLSchema.INT, Integer.toString(iteration)));
        long inferences = rule.execute(ruleStrategy, metadata);
        inferencesThisIteration += inferences;
        if (inferences > 0) {
            for (Rule successor : ruleset.getSuccessorsOf(rule)) {
                // If we'll handle the triggered rule in the current iteration,
                // it may not need to be checked in the next one.
                if (!activeNow.contains(successor)) {
                    activeNextIteration.add(successor);
                }
            }
        }
        prepareQueue();
        return inferences;
    }

    // Choose and remove the highest-priority rule from the active set:
    // rules that (transitively) trigger others come before the rules they
    // trigger, ties broken by trigger counts, then by name.
    private Rule getNextRule() {
        if (activeNow.isEmpty()) {
            return null;
        }
        Ruleset subset = new Ruleset(activeNow);
        // NOTE(review): this comparator may not be transitive when the
        // remaining rules contain trigger cycles, which TreeSet technically
        // requires -- the thenComparing(Rule::toString) fallback keeps
        // distinct rules from collapsing, but ordering should be confirmed.
        SortedSet<Rule> sorted = new TreeSet<>(new Comparator<Rule>() {
            @Override
            public int compare(Rule r1, Rule r2) {
                // If one rule triggers the other (directly or indirectly) but
                // not the other way around, the one that triggers the other
                // should come first.
                boolean forwardPath = subset.pathExists(r1, r2);
                boolean backwardPath = subset.pathExists(r2, r1);
                if (forwardPath && !backwardPath) {
                    return -1;
                }
                if (backwardPath && !forwardPath) {
                    return 1;
                }
                return 0;
            }
        }.thenComparingInt(rule -> {
            // Otherwise, prioritize rules that trigger many remaining rules,
            // and defer rules that can be triggered by many remaining rules.
            return remainingPredecessors(rule).size() - remainingSuccessors(rule).size();
        }).thenComparing(Rule::toString)); // Fall back on string comparison
        sorted.addAll(activeNow);
        Rule next = sorted.first();
        activeNow.remove(next);
        return next;
    }

    // Successors of the rule that are still scheduled in this iteration.
    private Set<Rule> remainingSuccessors(Rule rule) {
        Set<Rule> successors = new HashSet<>(ruleset.getSuccessorsOf(rule));
        successors.retainAll(activeNow);
        return successors;
    }

    // Predecessors of the rule that are still scheduled in this iteration.
    private Set<Rule> remainingPredecessors(Rule rule) {
        Set<Rule> predecessors = new HashSet<>(ruleset.getPredecessorsOf(rule));
        predecessors.retainAll(activeNow);
        return predecessors;
    }
}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/9f611019/extras/rya.forwardchain/src/main/java/org/apache/rya/forwardchain/strategy/SailExecutionStrategy.java
----------------------------------------------------------------------
diff --git 
a/extras/rya.forwardchain/src/main/java/org/apache/rya/forwardchain/strategy/SailExecutionStrategy.java
 
b/extras/rya.forwardchain/src/main/java/org/apache/rya/forwardchain/strategy/SailExecutionStrategy.java
new file mode 100644
index 0000000..d09c50c
--- /dev/null
+++ 
b/extras/rya.forwardchain/src/main/java/org/apache/rya/forwardchain/strategy/SailExecutionStrategy.java
@@ -0,0 +1,223 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.rya.forwardchain.strategy;
+
+import org.apache.accumulo.core.client.AccumuloException;
+import org.apache.accumulo.core.client.AccumuloSecurityException;
+import org.apache.log4j.Logger;
+import org.apache.rya.accumulo.AccumuloRdfConfiguration;
+import org.apache.rya.api.RdfCloudTripleStoreConfiguration;
+import org.apache.rya.api.domain.RyaStatement;
+import org.apache.rya.api.domain.StatementMetadata;
+import org.apache.rya.api.persist.RyaDAO;
+import org.apache.rya.api.persist.RyaDAOException;
+import org.apache.rya.api.persist.query.RyaQuery;
+import org.apache.rya.api.persist.query.RyaQueryEngine;
+import org.apache.rya.api.resolver.RdfToRyaConversions;
+import org.apache.rya.forwardchain.ForwardChainException;
+import org.apache.rya.forwardchain.rule.AbstractConstructRule;
+import org.apache.rya.indexing.accumulo.ConfigUtils;
+import org.apache.rya.mongodb.MongoDBRdfConfiguration;
+import org.apache.rya.sail.config.RyaSailFactory;
+import org.calrissian.mango.collect.CloseableIterable;
+import org.openrdf.model.Statement;
+import org.openrdf.query.GraphQuery;
+import org.openrdf.query.QueryEvaluationException;
+import org.openrdf.query.parser.ParsedGraphQuery;
+import org.openrdf.repository.RepositoryException;
+import org.openrdf.repository.sail.SailGraphQuery;
+import org.openrdf.repository.sail.SailRepository;
+import org.openrdf.repository.sail.SailRepositoryConnection;
+import org.openrdf.rio.RDFHandlerException;
+import org.openrdf.rio.helpers.RDFHandlerBase;
+
+import com.google.common.base.Preconditions;
+
+/**
+ * A naive but back-end-agnostic rule execution strategy that applies a
+ * construct rule by submitting the associated query to a Rya SAIL, then
+ * converting the resulting bindings (expecting variables "subject",
+ * "predicate", and "object") into triples and inserting them into a Rya DAO.
+ */
+public class SailExecutionStrategy extends AbstractRuleExecutionStrategy {
+    private static final Logger logger = 
Logger.getLogger(SailExecutionStrategy.class);
+
+    private final RdfCloudTripleStoreConfiguration conf;
+
+    private SailRepository repo = null;
+    private SailRepositoryConnection conn = null;
+    private RyaDAO<?> dao = null;
+    private boolean initialized = false;
+
+    /**
+     * Initialize a SailExecutionStrategy with the given configuration.
+     * @param conf Defines Rya connection and query parameters; not null.
+     */
+    public SailExecutionStrategy(RdfCloudTripleStoreConfiguration conf) {
+        Preconditions.checkNotNull(conf);
+        this.conf = conf;
+    }
+
+    /**
+     * Executes a CONSTRUCT query through the SAIL and inserts the results into
+     * the DAO.
+     * @param rule A construct query; not null.
+     * @param metadata Metadata to add to any inferred triples; not null.
+     * @return The number of inferred triples.
+     * @throws ForwardChainException if query execution or data insert fails.
+     */
+    @Override
+    public long executeConstructRule(AbstractConstructRule rule,
+            StatementMetadata metadata) throws ForwardChainException {
+        Preconditions.checkNotNull(rule);
+        Preconditions.checkNotNull(metadata);
+        if (!initialized) {
+            initialize();
+        }
+        ParsedGraphQuery graphQuery = rule.getQuery();
+        long statementsAdded = 0;
+        logger.info("Applying inference rule " + rule + "...");
+        for (String line : graphQuery.getTupleExpr().toString().split("\n")) {
+            logger.debug("\t" + line);
+        }
+        InferredStatementHandler<?> handler = new 
InferredStatementHandler<>(dao, metadata);
+        try {
+            GraphQuery executableQuery = new SailGraphQuery(graphQuery, conn) 
{ };
+            executableQuery.evaluate(handler);
+            statementsAdded = handler.getNumStatementsAdded();
+            logger.info("Added " + statementsAdded + " inferred statements.");
+            return statementsAdded;
+        } catch (QueryEvaluationException e) {
+            throw new ForwardChainException("Error evaluating query portion of 
construct rule", e);
+        } catch (RDFHandlerException e) {
+            throw new ForwardChainException("Error processing results of 
construct rule", e);
+        }
+    }
+
+    /**
+     * Connect to the Rya SAIL. If a DAO wasn't provided, instantiate one from
+     * the configuration. 
+     * @throws ForwardChainException if connecting fails.
+     */
+    @Override
+    public void initialize() throws ForwardChainException {
+        try {
+            if (dao == null) {
+                dao = getDAO();
+            }
+            repo = new SailRepository(RyaSailFactory.getInstance(conf));
+            conn = repo.getConnection();
+            initialized = true;
+        } catch (Exception e) {
+            shutDown();
+            throw new ForwardChainException("Error connecting to SAIL", e);
+        }
+    }
+
+    private RyaDAO<?> getDAO() throws RyaDAOException, ForwardChainException {
+        if (ConfigUtils.getUseMongo(conf)) {
+            MongoDBRdfConfiguration mongoConf;
+            if (conf instanceof MongoDBRdfConfiguration) {
+                mongoConf = (MongoDBRdfConfiguration) conf;
+            }
+            else {
+                mongoConf = new MongoDBRdfConfiguration(conf);
+            }
+            return RyaSailFactory.getMongoDAO(mongoConf);
+        }
+        else {
+            AccumuloRdfConfiguration accumuloConf;
+            if (conf instanceof AccumuloRdfConfiguration) {
+                accumuloConf = (AccumuloRdfConfiguration) conf;
+            }
+            else {
+                accumuloConf = new AccumuloRdfConfiguration(conf);
+            }
+            try {
+                return RyaSailFactory.getAccumuloDAO(accumuloConf);
+            } catch (AccumuloException | AccumuloSecurityException e) {
+                throw new ForwardChainException(e);
+            }
+        }
+    }
+
+    /**
+     * Shut down the SAIL connection objects.
+     */
+    @Override
+    public void shutDown() {
+        initialized = false;
+        if (conn != null) {
+            try {
+                conn.close();
+            } catch (RepositoryException e) {
+                logger.warn("Error closing SailRepositoryConnection", e);
+            }
+        }
+        if (repo != null && repo.isInitialized()) {
+            try {
+                repo.shutDown();
+            } catch (RepositoryException e) {
+                logger.warn("Error shutting down SailRepository", e);
+            }
+        }
+        try {
+            if (dao != null && dao.isInitialized()) {
+                dao.flush();
+            }
+        } catch (RyaDAOException e) {
+            logger.warn("Error flushing DAO", e);
+        }
+    }
+
+    private static class InferredStatementHandler<T extends 
RdfCloudTripleStoreConfiguration> extends RDFHandlerBase {
+        private RyaDAO<T> dao;
+        private RyaQueryEngine<T> engine;
+        private long numStatementsAdded = 0;
+        private StatementMetadata metadata;
+
+        InferredStatementHandler(RyaDAO<T> dao, StatementMetadata metadata) {
+            this.dao = dao;
+            this.engine = dao.getQueryEngine();
+            this.metadata = metadata;
+            this.engine.setConf(dao.getConf());
+        }
+
+        @Override
+        public void handleStatement(Statement statement) {
+            RyaStatement ryaStatement = 
RdfToRyaConversions.convertStatement(statement);
+            ryaStatement.setStatementMetadata(metadata);
+            try {
+                // Need to check whether the statement already exists, because
+                // we need an accurate count of newly added statements.
+                CloseableIterable<RyaStatement> iter = engine.query(new 
RyaQuery(ryaStatement));
+                if (!iter.iterator().hasNext()) {
+                    dao.add(ryaStatement);
+                    numStatementsAdded++;
+                }
+            } catch (RyaDAOException e) {
+                logger.error("Error handling inferred statement", e);
+            }
+        }
+
+        public long getNumStatementsAdded() {
+            return numStatementsAdded;
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/9f611019/extras/rya.forwardchain/src/test/java/org/apache/rya/forwardchain/batch/MongoSpinIT.java
----------------------------------------------------------------------
diff --git 
a/extras/rya.forwardchain/src/test/java/org/apache/rya/forwardchain/batch/MongoSpinIT.java
 
b/extras/rya.forwardchain/src/test/java/org/apache/rya/forwardchain/batch/MongoSpinIT.java
new file mode 100644
index 0000000..c70a025
--- /dev/null
+++ 
b/extras/rya.forwardchain/src/test/java/org/apache/rya/forwardchain/batch/MongoSpinIT.java
@@ -0,0 +1,169 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.rya.forwardchain.batch;
+
+import java.io.BufferedReader;
+import java.io.InputStream;
+import java.net.URL;
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+import java.util.stream.Collectors;
+
+import org.apache.hadoop.util.ToolRunner;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+import org.apache.rya.indexing.mongodb.MongoIndexingConfiguration;
+import 
org.apache.rya.indexing.mongodb.MongoIndexingConfiguration.MongoDBIndexingConfigBuilder;
+import org.apache.rya.mongodb.EmbeddedMongoFactory;
+import org.apache.rya.mongodb.MongoDBRdfConfiguration;
+import org.apache.rya.sail.config.RyaSailFactory;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.openrdf.model.ValueFactory;
+import org.openrdf.model.impl.ValueFactoryImpl;
+import org.openrdf.query.BindingSet;
+import org.openrdf.query.QueryLanguage;
+import org.openrdf.query.TupleQuery;
+import org.openrdf.query.TupleQueryResult;
+import org.openrdf.query.impl.ListBindingSet;
+import org.openrdf.repository.RepositoryException;
+import org.openrdf.repository.sail.SailRepository;
+import org.openrdf.repository.sail.SailRepositoryConnection;
+import org.openrdf.rio.RDFFormat;
+import org.openrdf.rio.Rio;
+
+import com.google.common.io.Resources;
+import com.mongodb.MongoClient;
+import com.mongodb.ServerAddress;
+
+public class MongoSpinIT {
+    private static final ValueFactory VF = ValueFactoryImpl.getInstance();
+    private static final String EX = "http://example.org/";
+
+    private MongoDBRdfConfiguration conf;
+    private SailRepository repository;
+
+    @Before
+    public void setup() throws Exception {
+        Logger.getLogger("org.apache.rya.mongodb").setLevel(Level.WARN);
+        Logger.getLogger("org.apache.rya.forwardchain").setLevel(Level.INFO);
+        conf = getConf();
+        repository = new SailRepository(RyaSailFactory.getInstance(conf));
+    }
+
+    @After
+    public void tearDown() throws Exception {
+        if (repository != null) {
+            try {
+                repository.shutDown();
+            } catch (final RepositoryException e) {
+                // quietly absorb this exception
+            }
+        }
+    }
+
+    @Test
+    public void testSailStrategy() throws Exception {
+        insertDataFile(Resources.getResource("data.ttl"), 
"http://example.org#");
+        insertDataFile(Resources.getResource("university.ttl"), 
"http://example.org#");
+        insertDataFile(Resources.getResource("owlrl.ttl"), 
"http://example.org#");
+        Set<BindingSet> solutions = 
executeQuery(Resources.getResource("query.sparql"));
+        Set<BindingSet> expected = new HashSet<>();
+        Assert.assertEquals(expected, solutions);
+        conf.setUseAggregationPipeline(false);
+        ForwardChainSpinTool tool = new ForwardChainSpinTool();
+        ToolRunner.run(conf, tool, new String[] {});
+        solutions = executeQuery(Resources.getResource("query.sparql"));
+        expected.add(new ListBindingSet(Arrays.asList("X", "Y"),
+            VF.createURI(EX, "Alice"), VF.createURI(EX, "Department1")));
+        Assert.assertEquals(expected, solutions);
+        Assert.assertEquals(24, tool.getNumInferences());
+    }
+
+    @Test
+    public void testPipelineStrategy() throws Exception {
+        insertDataFile(Resources.getResource("data.ttl"), 
"http://example.org#");
+        insertDataFile(Resources.getResource("university.ttl"), 
"http://example.org#");
+        insertDataFile(Resources.getResource("owlrl.ttl"), 
"http://example.org#");
+        Set<BindingSet> solutions = 
executeQuery(Resources.getResource("query.sparql"));
+        Set<BindingSet> expected = new HashSet<>();
+        Assert.assertEquals(expected, solutions);
+        conf.setUseAggregationPipeline(true);
+        ForwardChainSpinTool tool = new ForwardChainSpinTool();
+        ToolRunner.run(conf, tool, new String[] {});
+        solutions = executeQuery(Resources.getResource("query.sparql"));
+        expected.add(new ListBindingSet(Arrays.asList("X", "Y"),
+            VF.createURI(EX, "Alice"), VF.createURI(EX, "Department1")));
+        Assert.assertEquals(expected, solutions);
+        Assert.assertEquals(24, tool.getNumInferences());
+    }
+
+    private void insertDataFile(URL dataFile, String defaultNamespace) throws 
Exception {
+        RDFFormat format = Rio.getParserFormatForFileName(dataFile.getFile());
+        SailRepositoryConnection conn = repository.getConnection();
+        try {
+            conn.add(dataFile, defaultNamespace, format);
+        } finally {
+            closeQuietly(conn);
+        }
+    }
+
+    Set<BindingSet> executeQuery(URL queryFile) throws Exception {
+        SailRepositoryConnection conn = repository.getConnection();
+        try {
+            InputStream queryIS = queryFile.openStream();
+            BufferedReader br = new BufferedReader(new 
java.io.InputStreamReader(queryIS, "UTF-8"));
+            String query = br.lines().collect(Collectors.joining("\n"));
+            br.close();
+            TupleQuery tupleQuery = 
conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+            TupleQueryResult result = tupleQuery.evaluate();
+            Set<BindingSet> solutions = new HashSet<>();
+            while (result.hasNext()) {
+                solutions.add(result.next());
+            }
+            return solutions;
+        } finally {
+            closeQuietly(conn);
+        }
+    }
+
+    private static MongoDBRdfConfiguration getConf() throws Exception {
+        MongoDBIndexingConfigBuilder builder = 
MongoIndexingConfiguration.builder().setUseMockMongo(true);
+        final MongoClient c = 
EmbeddedMongoFactory.newFactory().newMongoClient();
+        final ServerAddress address = c.getAddress();
+        builder.setMongoHost(address.getHost());
+        builder.setMongoPort(Integer.toString(address.getPort()));
+        builder.setUseInference(false);
+        c.close();
+        return builder.build();
+    }
+
+    private static void closeQuietly(final SailRepositoryConnection conn) {
+        if (conn != null) {
+            try {
+                conn.close();
+            } catch (final RepositoryException e) {
+                // quietly absorb this exception
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/9f611019/extras/rya.forwardchain/src/test/java/org/apache/rya/forwardchain/rule/AntecedentVisitorTest.java
----------------------------------------------------------------------
diff --git 
a/extras/rya.forwardchain/src/test/java/org/apache/rya/forwardchain/rule/AntecedentVisitorTest.java
 
b/extras/rya.forwardchain/src/test/java/org/apache/rya/forwardchain/rule/AntecedentVisitorTest.java
new file mode 100644
index 0000000..7761a1a
--- /dev/null
+++ 
b/extras/rya.forwardchain/src/test/java/org/apache/rya/forwardchain/rule/AntecedentVisitorTest.java
@@ -0,0 +1,156 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.rya.forwardchain.rule;
+
+import java.util.Set;
+
+import org.junit.Assert;
+import org.junit.Test;
+import org.openrdf.model.URI;
+import org.openrdf.model.Value;
+import org.openrdf.model.ValueFactory;
+import org.openrdf.model.impl.ValueFactoryImpl;
+import org.openrdf.model.vocabulary.FOAF;
+import org.openrdf.model.vocabulary.RDF;
+import org.openrdf.model.vocabulary.RDFS;
+import org.openrdf.query.algebra.StatementPattern;
+import org.openrdf.query.algebra.Var;
+import org.openrdf.query.algebra.StatementPattern.Scope;
+import org.openrdf.query.parser.ParsedQuery;
+import org.openrdf.query.parser.sparql.SPARQLParser;
+
+import com.google.common.collect.Sets;
+
+public class AntecedentVisitorTest {
+    private static Var c(Value val) {
+        Var v = new Var("-const-" + val.stringValue(), val);
+        v.setAnonymous(true);
+        return v;
+    }
+
+    private static ValueFactory VF = ValueFactoryImpl.getInstance();
+    private static String EX = "http://example.org/";
+    private static URI G1 = VF.createURI(EX, "Graph1");
+    private static URI G2 = VF.createURI(EX, "Graph2");
+
+    @Test
+    public void testSelectQuery() throws Exception {
+        String text = "PREFIX foaf: <" + FOAF.NAMESPACE + ">\n"
+                + "SELECT * WHERE {\n"
+                + "  ?x a foaf:Person .\n"
+                + "  ?y a foaf:Person .\n"
+                + "  ?x foaf:knows ?y .\n"
+                + "}";
+        ParsedQuery query = new SPARQLParser().parseQuery(text, null);
+        AntecedentVisitor visitor = new AntecedentVisitor();
+        query.getTupleExpr().visit(visitor);
+        Set<StatementPattern> expected = Sets.newHashSet(
+                new StatementPattern(new Var("x"), c(RDF.TYPE), 
c(FOAF.PERSON)),
+                new StatementPattern(new Var("y"), c(RDF.TYPE), 
c(FOAF.PERSON)),
+                new StatementPattern(new Var("x"), c(FOAF.KNOWS), new 
Var("y")));
+        Assert.assertEquals(expected, visitor.getAntecedents());
+    }
+
+    @Test
+    public void testConstructQuery() throws Exception {
+        String text = "PREFIX foaf: <" + FOAF.NAMESPACE + ">\n"
+                + "CONSTRUCT {\n"
+                + "  ?y foaf:knows ?x .\n"
+                + "  ?y <urn:knows> ?x .\n"
+                + "  ?x <urn:knows> ?y .\n"
+                + "} WHERE {\n"
+                + "  ?x a foaf:Person .\n"
+                + "  ?y a foaf:Person .\n"
+                + "  ?x foaf:knows ?y .\n"
+                + "}";
+        ParsedQuery query = new SPARQLParser().parseQuery(text, null);
+        AntecedentVisitor visitor = new AntecedentVisitor();
+        query.getTupleExpr().visit(visitor);
+        Set<StatementPattern> expected = Sets.newHashSet(
+                new StatementPattern(new Var("x"), c(RDF.TYPE), 
c(FOAF.PERSON)),
+                new StatementPattern(new Var("y"), c(RDF.TYPE), 
c(FOAF.PERSON)),
+                new StatementPattern(new Var("x"), c(FOAF.KNOWS), new 
Var("y")));
+        Assert.assertEquals(expected, visitor.getAntecedents());
+    }
+
+    @Test
+    public void testComplexQuery() throws Exception {
+        String text = "PREFIX foaf: <" + FOAF.NAMESPACE + ">\n"
+                + "PREFIX ex: <" + EX + ">\n"
+                + "SELECT * WHERE {\n"
+                + "  { ?x a foaf:Person } UNION {\n"
+                + "    GRAPH ex:Graph1 { ?y a foaf:Person }\n"
+                + "  } .\n"
+                + "  GRAPH ex:Graph2 {\n"
+                + "    ?x foaf:knows ?y .\n"
+                + "  }\n ."
+                + "  OPTIONAL { ?x foaf:mbox ?m } .\n"
+                + "  FILTER (?x != ?y) .\n"
+                + "}";
+        ParsedQuery query = new SPARQLParser().parseQuery(text, null);
+        AntecedentVisitor visitor = new AntecedentVisitor();
+        query.getTupleExpr().visit(visitor);
+        Set<StatementPattern> expected = Sets.newHashSet(
+                new StatementPattern(Scope.NAMED_CONTEXTS, new Var("y"), 
c(RDF.TYPE), c(FOAF.PERSON), c(G1)),
+                new StatementPattern(new Var("x"), c(RDF.TYPE), 
c(FOAF.PERSON)),
+                new StatementPattern(Scope.NAMED_CONTEXTS, new Var("x"), 
c(FOAF.KNOWS), new Var("y"), c(G2)),
+                new StatementPattern(new Var("x"), c(FOAF.MBOX), new 
Var("m")));
+        Assert.assertEquals(expected, visitor.getAntecedents());
+    }
+
+    @Test
+    public void testBNodeQuery() throws Exception {
+        String text = "PREFIX foaf: <" + FOAF.NAMESPACE + ">\n"
+                + "SELECT * WHERE {\n"
+                + "  ?x a [ rdfs:subClassOf foaf:Person ] .\n"
+                + "  ?x foaf:knows ?y .\n"
+                + "}";
+        ParsedQuery query = new SPARQLParser().parseQuery(text, null);
+        AntecedentVisitor visitor = new AntecedentVisitor();
+        query.getTupleExpr().visit(visitor);
+        Set<StatementPattern> actual = visitor.getAntecedents();
+        Assert.assertEquals(3, actual.size());
+        StatementPattern knows = new StatementPattern(new Var("x"), 
c(FOAF.KNOWS), new Var("y"));
+        Assert.assertTrue(actual.remove(knows));
+        Assert.assertTrue(actual.removeIf(sp -> {
+            return sp.getSubjectVar().equals(new Var("x"))
+                    && RDF.TYPE.equals(sp.getPredicateVar().getValue())
+                    && sp.getObjectVar().getValue() == null;
+        }));
+        Assert.assertTrue(actual.removeIf(sp -> {
+            return sp.getSubjectVar().getValue() == null
+                    && RDFS.SUBCLASSOF.equals(sp.getPredicateVar().getValue())
+                    && FOAF.PERSON.equals(sp.getObjectVar().getValue());
+        }));
+    }
+
+    @Test
+    public void testNoSP() throws Exception {
+        String text = "CONSTRUCT {\n"
+                + "  owl:Thing a owl:Class ."
+                + "  owl:Nothing a owl:Class ."
+                + "  owl:Nothing rdfs:subClassOf owl:Thing ."
+                + "} WHERE { }";
+        ParsedQuery query = new SPARQLParser().parseQuery(text, null);
+        AntecedentVisitor visitor = new AntecedentVisitor();
+        query.getTupleExpr().visit(visitor);
+        Set<StatementPattern> expected = Sets.newHashSet();
+        Assert.assertEquals(expected, visitor.getAntecedents());
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/9f611019/extras/rya.forwardchain/src/test/java/org/apache/rya/forwardchain/rule/ConstructConsequentVisitorTest.java
----------------------------------------------------------------------
diff --git 
a/extras/rya.forwardchain/src/test/java/org/apache/rya/forwardchain/rule/ConstructConsequentVisitorTest.java
 
b/extras/rya.forwardchain/src/test/java/org/apache/rya/forwardchain/rule/ConstructConsequentVisitorTest.java
new file mode 100644
index 0000000..0865ef8
--- /dev/null
+++ 
b/extras/rya.forwardchain/src/test/java/org/apache/rya/forwardchain/rule/ConstructConsequentVisitorTest.java
@@ -0,0 +1,164 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.rya.forwardchain.rule;
+
+import java.util.Arrays;
+import java.util.Set;
+
+import org.junit.Assert;
+import org.junit.Test;
+import org.openrdf.model.Value;
+import org.openrdf.model.vocabulary.FOAF;
+import org.openrdf.model.vocabulary.OWL;
+import org.openrdf.model.vocabulary.RDF;
+import org.openrdf.query.algebra.BNodeGenerator;
+import org.openrdf.query.algebra.Extension;
+import org.openrdf.query.algebra.ExtensionElem;
+import org.openrdf.query.algebra.MultiProjection;
+import org.openrdf.query.algebra.Projection;
+import org.openrdf.query.algebra.ProjectionElem;
+import org.openrdf.query.algebra.ProjectionElemList;
+import org.openrdf.query.algebra.SingletonSet;
+import org.openrdf.query.algebra.StatementPattern;
+import org.openrdf.query.algebra.ValueConstant;
+import org.openrdf.query.algebra.Var;
+
+import com.google.common.collect.Sets;
+
+public class ConstructConsequentVisitorTest {
+    private static Var s(Value val) {
+        return new Var("subject", val);
+    }
+    private static Var p(Value val) {
+        return new Var("predicate", val);
+    }
+    private static Var o(Value val) {
+        return new Var("object", val);
+    }
+    private static Var anon(Var var) {
+        var.setAnonymous(true);
+        return var;
+    }
+
+    @Test
+    public void testGenericSP() {
+        Extension extension = new Extension(new SingletonSet(),
+                new ExtensionElem(new Var("z"), "z"));
+        Projection projection = new Projection(extension, new 
ProjectionElemList(
+                new ProjectionElem("x", "subject"),
+                new ProjectionElem("y", "predicate"),
+                new ProjectionElem("z", "object")));
+        ConstructConsequentVisitor visitor = new ConstructConsequentVisitor();
+        projection.visit(visitor);
+        Set<StatementPattern> expected = Sets.newHashSet(
+                new StatementPattern(s(null), p(null), o(null)));
+        Assert.assertEquals(expected, visitor.getConsequents());
+    }
+
+    @Test
+    public void testConcreteSP() {
+        Extension extension = new Extension(new SingletonSet(),
+                new ExtensionElem(new ValueConstant(FOAF.PERSON), "x"),
+                new ExtensionElem(new ValueConstant(RDF.TYPE), "y"),
+                new ExtensionElem(new ValueConstant(OWL.CLASS), "z"));
+        Projection projection = new Projection(extension, new 
ProjectionElemList(
+                new ProjectionElem("x", "subject"),
+                new ProjectionElem("y", "predicate"),
+                new ProjectionElem("z", "object")));
+        ConstructConsequentVisitor visitor = new ConstructConsequentVisitor();
+        projection.visit(visitor);
+        Set<StatementPattern> expected = Sets.newHashSet(
+                new StatementPattern(s(FOAF.PERSON), p(RDF.TYPE), 
o(OWL.CLASS)));
+        Assert.assertEquals(expected, visitor.getConsequents());
+    }
+
+    @Test
+    public void testMissingVariables() {
+        Extension extension = new Extension(new SingletonSet(),
+                new ExtensionElem(new ValueConstant(FOAF.PERSON), "x"),
+                new ExtensionElem(new ValueConstant(RDF.TYPE), "y"));
+        Projection projection = new Projection(extension, new 
ProjectionElemList(
+                new ProjectionElem("x", "s"),
+                new ProjectionElem("y", "predicate"),
+                new ProjectionElem("z", "object")));
+        ConstructConsequentVisitor visitor = new ConstructConsequentVisitor();
+        projection.visit(visitor);
+        Set<StatementPattern> expected = Sets.newHashSet(
+                new StatementPattern(s(null), p(RDF.TYPE), o(null)));
+        Assert.assertEquals(expected, visitor.getConsequents());
+    }
+
+    @Test
+    public void testMultiProjection() {
+        Extension extension = new Extension(new SingletonSet(),
+                new ExtensionElem(new ValueConstant(RDF.TYPE), "rdftype"),
+                new ExtensionElem(new ValueConstant(OWL.OBJECTPROPERTY), 
"owlprop"),
+                new ExtensionElem(new ValueConstant(OWL.EQUIVALENTCLASS), 
"owleqcls"),
+                new ExtensionElem(new ValueConstant(OWL.CLASS), "owlclass"));
+        MultiProjection projection = new MultiProjection(extension, 
Arrays.asList(
+                new ProjectionElemList(
+                        new ProjectionElem("cls", "subject"),
+                        new ProjectionElem("rdftype", "predicate"),
+                        new ProjectionElem("owlclass", "object")),
+                new ProjectionElemList(
+                        new ProjectionElem("prop", "subject"),
+                        new ProjectionElem("rdftype", "predicate"),
+                        new ProjectionElem("owlprop", "object")),
+                new ProjectionElemList(
+                        new ProjectionElem("owleqcls", "predicate"),
+                        new ProjectionElem("cls", "object"))));
+        ConstructConsequentVisitor visitor = new ConstructConsequentVisitor();
+        projection.visit(visitor);
+        Set<StatementPattern> expected = Sets.newHashSet(
+                new StatementPattern(s(null), p(RDF.TYPE), o(OWL.CLASS)),
+                new StatementPattern(s(null), p(RDF.TYPE), 
o(OWL.OBJECTPROPERTY)),
+                new StatementPattern(s(null), p(OWL.EQUIVALENTCLASS), 
o(null)));
+        Assert.assertEquals(expected, visitor.getConsequents());
+    }
+
+    @Test
+    public void testNoExtension() {
+        StatementPattern sp = new StatementPattern(new Var("x"), new Var("y"), 
new Var("z"));
+        Projection projection = new Projection(sp, new ProjectionElemList(
+                new ProjectionElem("x", "subject"),
+                new ProjectionElem("y", "predicate"),
+                new ProjectionElem("z", "object")));
+        ConstructConsequentVisitor visitor = new ConstructConsequentVisitor();
+        projection.visit(visitor);
+        Set<StatementPattern> expected = Sets.newHashSet(
+                new StatementPattern(s(null), p(null), o(null)));
+        Assert.assertEquals(expected, visitor.getConsequents());
+    }
+
+    @Test
+    public void testBNode() {
+        Extension extension = new Extension(new SingletonSet(),
+                new ExtensionElem(new Var("x"), "x"),
+                new ExtensionElem(new BNodeGenerator(), "z"));
+        Projection projection = new Projection(extension, new 
ProjectionElemList(
+                new ProjectionElem("x", "subject"),
+                new ProjectionElem("y", "predicate"),
+                new ProjectionElem("z", "object")));
+        ConstructConsequentVisitor visitor = new ConstructConsequentVisitor();
+        projection.visit(visitor);
+        Set<StatementPattern> expected = Sets.newHashSet(
+                new StatementPattern(s(null), p(null), anon(o(null))));
+        Assert.assertEquals(expected, visitor.getConsequents());
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/9f611019/extras/rya.forwardchain/src/test/java/org/apache/rya/forwardchain/rule/RulesetTest.java
----------------------------------------------------------------------
diff --git 
a/extras/rya.forwardchain/src/test/java/org/apache/rya/forwardchain/rule/RulesetTest.java
 
b/extras/rya.forwardchain/src/test/java/org/apache/rya/forwardchain/rule/RulesetTest.java
new file mode 100644
index 0000000..adb851b
--- /dev/null
+++ 
b/extras/rya.forwardchain/src/test/java/org/apache/rya/forwardchain/rule/RulesetTest.java
@@ -0,0 +1,137 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.rya.forwardchain.rule;
+
+import java.util.Collection;
+import java.util.Set;
+
+import org.apache.rya.api.domain.StatementMetadata;
+import org.apache.rya.forwardchain.ForwardChainException;
+import org.apache.rya.forwardchain.strategy.AbstractRuleExecutionStrategy;
+import org.junit.Assert;
+import org.junit.Test;
+import org.openrdf.model.Value;
+import org.openrdf.model.vocabulary.RDF;
+import org.openrdf.model.vocabulary.RDFS;
+import org.openrdf.query.algebra.StatementPattern;
+import org.openrdf.query.algebra.Var;
+
+import com.google.common.collect.Sets;
+
+public class RulesetTest {
+    private static Var c(Value val) {
+        Var v = new Var("-const-" + val.stringValue(), val);
+        v.setAnonymous(true);
+        return v;
+    }
+
+    private static class TestRule implements Rule {
+        private final Collection<StatementPattern> consume;
+        private final Collection<StatementPattern> produce;
+        TestRule(Collection<StatementPattern> consume, 
Collection<StatementPattern> produce) {
+            this.consume = consume;
+            this.produce = produce;
+        }
+        @Override
+        public boolean canConclude(StatementPattern sp) {
+            return produce.contains(sp);
+        }
+        @Override
+        public Collection<StatementPattern> getAntecedentPatterns() {
+            return consume;
+        }
+        @Override
+        public Collection<StatementPattern> getConsequentPatterns() {
+            return produce;
+        }
+        @Override
+        public long execute(AbstractRuleExecutionStrategy strategy,
+                StatementMetadata metadata) throws ForwardChainException {
+            return 0;
+        }
+    }
+
+    @Test
+    public void testDependencies() {
+        StatementPattern genericSP = new StatementPattern(new Var("a"), new 
Var("b"), new Var("c"));
+        StatementPattern typeSP = new StatementPattern(new Var("x"), 
c(RDF.TYPE), new Var("t"));
+        StatementPattern scoSP = new StatementPattern(new Var("x"), 
c(RDFS.SUBCLASSOF), new Var("y"));
+        Rule typeTriggersAny = new TestRule(
+                Sets.newHashSet(typeSP),
+                Sets.newHashSet(genericSP, typeSP, scoSP));
+        Rule subclassTriggersType = new TestRule(
+                Sets.newHashSet(scoSP),
+                Sets.newHashSet(genericSP, typeSP));
+        Rule anyTriggersNothing = new TestRule(
+                Sets.newHashSet(genericSP),
+                Sets.newHashSet());
+        Set<Rule> allRules = Sets.newHashSet(anyTriggersNothing, 
subclassTriggersType, typeTriggersAny);
+        Set<Rule> noRules = Sets.newHashSet();
+        Set<Rule> produceType = Sets.newHashSet(subclassTriggersType, 
typeTriggersAny);
+        Set<Rule> produceSubclass = Sets.newHashSet(typeTriggersAny);
+        Set<Rule> produceAny = Sets.newHashSet(subclassTriggersType, 
typeTriggersAny);
+        Set<Rule> consumeType = Sets.newHashSet(anyTriggersNothing, 
typeTriggersAny);
+        Ruleset ruleset = new Ruleset(allRules);
+        Assert.assertEquals(produceType, 
ruleset.getPredecessorsOf(typeTriggersAny));
+        Assert.assertEquals(allRules, 
ruleset.getSuccessorsOf(typeTriggersAny));
+        Assert.assertEquals(produceSubclass, 
ruleset.getPredecessorsOf(subclassTriggersType));
+        Assert.assertEquals(consumeType, 
ruleset.getSuccessorsOf(subclassTriggersType));
+        Assert.assertEquals(produceAny, 
ruleset.getPredecessorsOf(anyTriggersNothing));
+        Assert.assertEquals(noRules, 
ruleset.getSuccessorsOf(anyTriggersNothing));
+    }
+
+    @Test
+    public void testIndirectDependencies() {
+        StatementPattern genericSP = new StatementPattern(new Var("a"), new 
Var("b"), new Var("c"));
+        StatementPattern typeSP = new StatementPattern(new Var("x"), 
c(RDF.TYPE), new Var("t"));
+        StatementPattern scoSP = new StatementPattern(new Var("x"), 
c(RDFS.SUBCLASSOF), new Var("y"));
+        StatementPattern spoSP = new StatementPattern(new Var("x"), 
c(RDFS.SUBPROPERTYOF), new Var("y"));
+        Rule typeTriggersAny = new TestRule(
+                Sets.newHashSet(typeSP),
+                Sets.newHashSet(genericSP, typeSP, scoSP));
+        Rule subclassTriggersType = new TestRule(
+                Sets.newHashSet(scoSP),
+                Sets.newHashSet(genericSP, typeSP));
+        Rule anyTriggersNothing = new TestRule(
+                Sets.newHashSet(genericSP),
+                Sets.newHashSet());
+        Rule typeTriggersSubprop = new TestRule(
+                Sets.newHashSet(typeSP),
+                Sets.newHashSet(genericSP, spoSP));
+        Set<Rule> allRules = Sets.newHashSet(anyTriggersNothing, 
subclassTriggersType,
+                typeTriggersAny, typeTriggersSubprop);
+        Ruleset ruleset = new Ruleset(allRules);
+        Assert.assertTrue(ruleset.pathExists(typeTriggersAny, 
typeTriggersAny));
+        Assert.assertTrue(ruleset.pathExists(typeTriggersAny, 
subclassTriggersType));
+        Assert.assertTrue(ruleset.pathExists(typeTriggersAny, 
anyTriggersNothing));
+        Assert.assertTrue(ruleset.pathExists(typeTriggersAny, 
typeTriggersSubprop));
+        Assert.assertTrue(ruleset.pathExists(subclassTriggersType, 
typeTriggersAny));
+        Assert.assertTrue(ruleset.pathExists(subclassTriggersType, 
subclassTriggersType));
+        Assert.assertTrue(ruleset.pathExists(subclassTriggersType, 
anyTriggersNothing));
+        Assert.assertTrue(ruleset.pathExists(subclassTriggersType, 
typeTriggersSubprop));
+        Assert.assertFalse(ruleset.pathExists(anyTriggersNothing, 
typeTriggersAny));
+        Assert.assertFalse(ruleset.pathExists(anyTriggersNothing, 
subclassTriggersType));
+        Assert.assertFalse(ruleset.pathExists(anyTriggersNothing, 
anyTriggersNothing));
+        Assert.assertFalse(ruleset.pathExists(anyTriggersNothing, 
typeTriggersSubprop));
+        Assert.assertFalse(ruleset.pathExists(typeTriggersSubprop, 
typeTriggersAny));
+        Assert.assertFalse(ruleset.pathExists(typeTriggersSubprop, 
subclassTriggersType));
+        Assert.assertTrue(ruleset.pathExists(typeTriggersSubprop, 
anyTriggersNothing));
+        Assert.assertFalse(ruleset.pathExists(typeTriggersSubprop, 
typeTriggersSubprop));
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/9f611019/extras/rya.forwardchain/src/test/java/org/apache/rya/forwardchain/rule/SpinConstructRuleTest.java
----------------------------------------------------------------------
diff --git 
a/extras/rya.forwardchain/src/test/java/org/apache/rya/forwardchain/rule/SpinConstructRuleTest.java
 
b/extras/rya.forwardchain/src/test/java/org/apache/rya/forwardchain/rule/SpinConstructRuleTest.java
new file mode 100644
index 0000000..9bbcce0
--- /dev/null
+++ 
b/extras/rya.forwardchain/src/test/java/org/apache/rya/forwardchain/rule/SpinConstructRuleTest.java
@@ -0,0 +1,213 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.rya.forwardchain.rule;
+
+import java.util.Arrays;
+
+import org.junit.Assert;
+import org.junit.Test;
+import org.openrdf.model.URI;
+import org.openrdf.model.Value;
+import org.openrdf.model.ValueFactory;
+import org.openrdf.model.impl.ValueFactoryImpl;
+import org.openrdf.model.vocabulary.FOAF;
+import org.openrdf.model.vocabulary.OWL;
+import org.openrdf.model.vocabulary.RDF;
+import org.openrdf.model.vocabulary.RDFS;
+import org.openrdf.query.algebra.StatementPattern;
+import org.openrdf.query.algebra.Var;
+import org.openrdf.query.parser.ParsedGraphQuery;
+import org.openrdf.query.parser.sparql.SPARQLParser;
+
+import com.google.common.collect.HashMultiset;
+import com.google.common.collect.Multiset;
+
/**
 * Tests {@link SpinConstructRule}: extraction of antecedent patterns (from the
 * WHERE clause plus the implicit type restriction on {@code ?this}) and
 * consequent patterns (from the CONSTRUCT template), anonymous-consequent
 * detection, and {@code canConclude} pattern compatibility.
 */
public class SpinConstructRuleTest {
    // Shared factory and parser reused across tests.
    private static ValueFactory VF = ValueFactoryImpl.getInstance();
    private static SPARQLParser PARSER = new SPARQLParser();

    // URIs identifying the rules under test, plus an example class URI.
    private static URI RL_CAX_SCO = VF.createURI("http://example.org/rl/cax-sco");
    private static URI RL_SCM_CLS = VF.createURI("http://example.org/rl/scm-cls");
    private static URI RL_PRP_SPO1 = VF.createURI("http://example.org/rl/prp-spo");
    private static URI LIVING_THING = VF.createURI("http://example.org/LivingThing");

    // Constant Var named the way the SPARQL algebra names constants.
    private static Var c(Value val) {
        return new Var("-const-" + val.stringValue(), val);
    }
    // Same as c(), but marked anonymous (as the parser marks constants).
    private static Var ac(Value val) {
        Var v = c(val);
        v.setAnonymous(true);
        return v;
    }

    /**
     * An empty WHERE clause still yields the implicit "?this a &lt;type&gt;"
     * antecedent from the rule's subject type (foaf:Person here).
     */
    @Test
    public void testEmptyWhere() throws Exception {
        String text = "CONSTRUCT {\n"
                + "  ?this a <" + LIVING_THING.stringValue() + "> .\n"
                + "} WHERE { }";
        ParsedGraphQuery query = (ParsedGraphQuery) PARSER.parseQuery(text, null);
        SpinConstructRule rule = new SpinConstructRule(FOAF.PERSON, VF.createURI("urn:person-is-living"), query);
        // Only antecedent: the implicit type restriction on ?this.
        Multiset<StatementPattern> expectedAntecedents = HashMultiset.create(Arrays.asList(
                new StatementPattern(new Var("this"), c(RDF.TYPE), c(FOAF.PERSON))));
        // Consequent: subject free; predicate/object bound to rdf:type/LivingThing.
        Multiset<StatementPattern> expectedConsequents = HashMultiset.create(Arrays.asList(
                new StatementPattern(new Var("subject"), new Var("predicate", RDF.TYPE), new Var("object", LIVING_THING))));
        Assert.assertEquals(expectedAntecedents, HashMultiset.create(rule.getAntecedentPatterns()));
        Assert.assertEquals(expectedConsequents, HashMultiset.create(rule.getConsequentPatterns()));
        Assert.assertFalse(rule.hasAnonymousConsequent());
        // Basic pattern matches
        Assert.assertTrue(rule.canConclude(new StatementPattern(new Var("x"), c(RDF.TYPE), c(LIVING_THING))));
        // Broader patterns match (variables in place of constants)
        Assert.assertTrue(rule.canConclude(new StatementPattern(new Var("x"), c(RDF.TYPE), new Var("y"))));
        Assert.assertTrue(rule.canConclude(new StatementPattern(new Var("x"), new Var("y"), c(LIVING_THING))));
        Assert.assertTrue(rule.canConclude(new StatementPattern(new Var("a"), new Var("b"), new Var("c"))));
        // Narrower patterns match (constants in place of variables)
        Assert.assertTrue(rule.canConclude(new StatementPattern(c(RDF.TYPE), c(RDF.TYPE), c(LIVING_THING))));
        Assert.assertTrue(rule.canConclude(new StatementPattern(c(FOAF.MBOX), c(RDF.TYPE), new Var("y"))));
        Assert.assertTrue(rule.canConclude(new StatementPattern(c(RDF.ALT), new Var("y"), c(LIVING_THING))));
        Assert.assertTrue(rule.canConclude(new StatementPattern(c(RDF.BAG), new Var("b"), new Var("c"))));
        // Incompatible patterns don't match (different constants)
        Assert.assertFalse(rule.canConclude(new StatementPattern(new Var("x"), c(RDFS.SUBCLASSOF), new Var("y"))));
        Assert.assertFalse(rule.canConclude(new StatementPattern(new Var("x"), new Var("y"), c(FOAF.PERSON))));
        Assert.assertFalse(rule.canConclude(new StatementPattern(c(RDF.TYPE), c(RDF.TYPE), c(RDF.TYPE))));
    }

    /**
     * With subject type owl:Thing and no ?this in the query, no implicit type
     * antecedent is added; only the WHERE-clause patterns appear.
     */
    @Test
    public void testThisUnbound() throws Exception {
        String text = "CONSTRUCT {\n"
                + "  ?ind a ?superclass .\n"
                + "} WHERE {\n"
                + "  ?ind a ?subclass .\n"
                + "  ?subclass rdfs:subClassOf ?superclass .\n"
                + "}";
        ParsedGraphQuery query = (ParsedGraphQuery) PARSER.parseQuery(text, null);
        SpinConstructRule rule = new SpinConstructRule(OWL.THING, RL_CAX_SCO, query);
        // Parser-produced constants are anonymous, hence ac() here.
        Multiset<StatementPattern> expectedAntecedents = HashMultiset.create(Arrays.asList(
                new StatementPattern(new Var("subclass"), ac(RDFS.SUBCLASSOF), new Var("superclass")),
                new StatementPattern(new Var("ind"), ac(RDF.TYPE), new Var("subclass"))));
        // Consequent: any subject, rdf:type predicate, any object.
        Multiset<StatementPattern> expectedConsequents = HashMultiset.create(Arrays.asList(
                new StatementPattern(new Var("subject"), new Var("predicate", RDF.TYPE), new Var("object"))));
        Assert.assertEquals(expectedAntecedents, HashMultiset.create(rule.getAntecedentPatterns()));
        Assert.assertEquals(expectedConsequents, HashMultiset.create(rule.getConsequentPatterns()));
        Assert.assertFalse(rule.hasAnonymousConsequent());
        // Basic pattern matches
        Assert.assertTrue(rule.canConclude(new StatementPattern(new Var("x"), c(RDF.TYPE), new Var("y"))));
        // Broader patterns match (variables in place of constants)
        Assert.assertTrue(rule.canConclude(new StatementPattern(new Var("a"), new Var("b"), new Var("c"))));
        // Narrower patterns match (constants in place of variables)
        Assert.assertTrue(rule.canConclude(new StatementPattern(c(RDF.TYPE), c(RDF.TYPE), c(RDF.TYPE))));
        // Incompatible patterns don't match (different constants)
        Assert.assertFalse(rule.canConclude(new StatementPattern(new Var("x"), c(RDFS.SUBCLASSOF), new Var("y"))));
    }

    /**
     * Multiple template triples each become a distinct consequent pattern, and
     * canConclude matches a pattern if ANY consequent is compatible with it.
     */
    @Test
    public void testMultipleConsequents() throws Exception {
        String text = "CONSTRUCT {\n"
                // actual rule is "?this subClassOf ?this", but reflexive construct patterns produce
                // bnodes due to an openrdf bug, resulting in incorrect matches
                + "  ?this rdfs:subClassOf ?something .\n"
                + "  ?this owl:equivalentClass ?something .\n"
                + "  ?this rdfs:subClassOf owl:Thing .\n"
                + "  owl:Nothing rdfs:subClassOf ?this .\n"
                + "} WHERE { }";
        ParsedGraphQuery query = (ParsedGraphQuery) PARSER.parseQuery(text, null);
        SpinConstructRule rule = new SpinConstructRule(OWL.CLASS, RL_SCM_CLS, query);
        // Implicit antecedent: ?this a owl:Class.
        Multiset<StatementPattern> expectedAntecedents = HashMultiset.create(Arrays.asList(
                new StatementPattern(new Var("this"), c(RDF.TYPE), c(OWL.CLASS))));
        // One consequent pattern per template triple.
        Multiset<StatementPattern> expectedConsequents = HashMultiset.create(Arrays.asList(
                new StatementPattern(new Var("subject"), new Var("predicate", RDFS.SUBCLASSOF), new Var("object")),
                new StatementPattern(new Var("subject"), new Var("predicate", OWL.EQUIVALENTCLASS), new Var("object")),
                new StatementPattern(new Var("subject"), new Var("predicate", RDFS.SUBCLASSOF), new Var("object", OWL.THING)),
                new StatementPattern(new Var("subject", OWL.NOTHING), new Var("predicate", RDFS.SUBCLASSOF), new Var("object"))));
        Assert.assertEquals(expectedAntecedents, HashMultiset.create(rule.getAntecedentPatterns()));
        Assert.assertEquals(expectedConsequents, HashMultiset.create(rule.getConsequentPatterns()));
        // Basic pattern matches
        Assert.assertTrue(rule.canConclude(new StatementPattern(new Var("x"), c(RDFS.SUBCLASSOF), new Var("y"))));
        Assert.assertTrue(rule.canConclude(new StatementPattern(new Var("x"), c(OWL.EQUIVALENTCLASS), new Var("y"))));
        Assert.assertTrue(rule.canConclude(new StatementPattern(new Var("x"), c(RDFS.SUBCLASSOF), c(OWL.THING))));
        Assert.assertTrue(rule.canConclude(new StatementPattern(c(OWL.NOTHING), c(RDFS.SUBCLASSOF), new Var("y"))));
        // Broader patterns match (variables in place of constants)
        Assert.assertTrue(rule.canConclude(new StatementPattern(new Var("a"), new Var("b"), new Var("c"))));
        Assert.assertTrue(rule.canConclude(new StatementPattern(new Var("a"), new Var("b"), c(OWL.THING))));
        Assert.assertTrue(rule.canConclude(new StatementPattern(c(OWL.NOTHING), new Var("b"), new Var("c"))));
        // Narrower patterns match (constants in place of variables)
        Assert.assertTrue(rule.canConclude(new StatementPattern(c(FOAF.PERSON), c(RDFS.SUBCLASSOF), new Var("x"))));
        Assert.assertTrue(rule.canConclude(new StatementPattern(c(FOAF.PERSON), c(OWL.EQUIVALENTCLASS), c(FOAF.PERSON))));
        Assert.assertTrue(rule.canConclude(new StatementPattern(c(OWL.NOTHING), c(RDFS.SUBCLASSOF), c(FOAF.PERSON))));
        Assert.assertTrue(rule.canConclude(new StatementPattern(c(OWL.NOTHING), c(OWL.EQUIVALENTCLASS), c(FOAF.PERSON))));
        Assert.assertTrue(rule.canConclude(new StatementPattern(c(OWL.NOTHING), c(OWL.EQUIVALENTCLASS), c(OWL.THING))));
        // Incompatible patterns don't match (different constants)
        Assert.assertFalse(rule.canConclude(new StatementPattern(new Var("x"), c(RDFS.SUBPROPERTYOF), c(OWL.THING))));
    }

    /**
     * A fully variable template triple yields a fully general consequent, so
     * every statement pattern should be concludable.
     */
    @Test
    public void testGeneralConsequent() throws Exception {
        String text = "CONSTRUCT {\n"
                + "  ?x ?p2 ?y"
                + "} WHERE {\n"
                + "  ?x ?p1 ?y .\n"
                + "  ?p1 rdfs:subPropertyOf ?p2 .\n"
                + "}";
        ParsedGraphQuery query = (ParsedGraphQuery) PARSER.parseQuery(text, null);
        SpinConstructRule rule = new SpinConstructRule(OWL.THING, RL_PRP_SPO1, query);
        Multiset<StatementPattern> expectedAntecedents = HashMultiset.create(Arrays.asList(
                new StatementPattern(new Var("p1"), ac(RDFS.SUBPROPERTYOF), new Var("p2")),
                new StatementPattern(new Var("x"), new Var("p1"), new Var("y"))));
        // Consequent is the unconstrained subject/predicate/object pattern.
        Multiset<StatementPattern> expectedConsequents = HashMultiset.create(Arrays.asList(
                new StatementPattern(new Var("subject"), new Var("predicate"), new Var("object"))));
        Assert.assertEquals(expectedAntecedents, HashMultiset.create(rule.getAntecedentPatterns()));
        Assert.assertEquals(expectedConsequents, HashMultiset.create(rule.getConsequentPatterns()));
        Assert.assertFalse(rule.hasAnonymousConsequent());
        // Basic pattern matches
        Assert.assertTrue(rule.canConclude(new StatementPattern(new Var("a"), new Var("b"), new Var("c"))));
        // Narrower patterns match (constants in place of variables)
        Assert.assertTrue(rule.canConclude(new StatementPattern(new Var("x"), c(RDFS.SUBPROPERTYOF), c(OWL.THING))));
        Assert.assertTrue(rule.canConclude(new StatementPattern(c(OWL.NOTHING), new Var("prop"), c(OWL.THING))));
        Assert.assertTrue(rule.canConclude(new StatementPattern(c(FOAF.PERSON), c(RDFS.SUBCLASSOF), new Var("x"))));
        Assert.assertTrue(rule.canConclude(new StatementPattern(c(OWL.NOTHING), c(RDFS.SUBCLASSOF), c(FOAF.PERSON))));
    }

    /**
     * A blank node in the construct template should be detected as an
     * anonymous consequent, without affecting canConclude matching.
     */
    @Test
    public void testAnonymousConsequent() throws Exception {
        String text = "CONSTRUCT {\n"
                + "  ?x ?p2 _:something"
                + "} WHERE {\n"
                + "  ?x ?p1 ?y .\n"
                + "  ?p1 rdfs:subPropertyOf ?p2 .\n"
                + "}";
        ParsedGraphQuery query = (ParsedGraphQuery) PARSER.parseQuery(text, null);
        SpinConstructRule rule = new SpinConstructRule(OWL.THING, RL_PRP_SPO1, query);
        Multiset<StatementPattern> expectedAntecedents = HashMultiset.create(Arrays.asList(
                new StatementPattern(new Var("p1"), ac(RDFS.SUBPROPERTYOF), new Var("p2")),
                new StatementPattern(new Var("x"), new Var("p1"), new Var("y"))));
        Assert.assertEquals(expectedAntecedents, HashMultiset.create(rule.getAntecedentPatterns()));
        // should have detected anonymous node
        Assert.assertTrue(rule.hasAnonymousConsequent());
        Var anonymousObject = new Var("object");
        anonymousObject.setAnonymous(true);
        Multiset<StatementPattern> expectedConsequents = HashMultiset.create(Arrays.asList(
                new StatementPattern(new Var("subject"), new Var("predicate"), anonymousObject)));
        Assert.assertEquals(expectedConsequents, HashMultiset.create(rule.getConsequentPatterns()));
        // Pattern matches should be unaffected by anonymous node status
        Assert.assertTrue(rule.canConclude(new StatementPattern(new Var("a"), new Var("b"), new Var("c"))));
        Assert.assertTrue(rule.canConclude(new StatementPattern(new Var("x"), c(RDFS.SUBPROPERTYOF), c(OWL.THING))));
        Assert.assertTrue(rule.canConclude(new StatementPattern(c(OWL.NOTHING), new Var("prop"), c(OWL.THING))));
        Assert.assertTrue(rule.canConclude(new StatementPattern(c(FOAF.PERSON), c(RDFS.SUBCLASSOF), new Var("x"))));
        Assert.assertTrue(rule.canConclude(new StatementPattern(c(OWL.NOTHING), c(RDFS.SUBCLASSOF), c(FOAF.PERSON))));
    }
}

http://git-wip-us.apache.org/repos/asf/incubator-rya/blob/9f611019/extras/rya.forwardchain/src/test/resources/data.ttl
----------------------------------------------------------------------
diff --git a/extras/rya.forwardchain/src/test/resources/data.ttl 
b/extras/rya.forwardchain/src/test/resources/data.ttl
new file mode 100644
index 0000000..f026409
--- /dev/null
+++ b/extras/rya.forwardchain/src/test/resources/data.ttl
@@ -0,0 +1,56 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# Sample data similar to LUBM
+
+@prefix rdf:     <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
+@prefix rdfs:    <http://www.w3.org/2000/01/rdf-schema#> .
+@prefix owl:     <http://www.w3.org/2002/07/owl#> .
+@prefix xsd:     <http://www.w3.org/2001/XMLSchema#> .
+@prefix lubm: <http://swat.cse.lehigh.edu/onto/univ-bench.owl#> .
+@prefix ex: <http://example.org/> .
+
+ex:Department0 lubm:subOrganizationOf ex:College0 .
+ex:Department1 lubm:subOrganizationOf ex:College1 .
+ex:Department2 lubm:subOrganizationOf ex:College2 .
+ex:Department3 lubm:subOrganizationOf ex:College2 .
+
+ex:College0 a lubm:Organization ; lubm:subOrganizationOf ex:University0 .
+ex:College1 a lubm:Organization ; lubm:subOrganizationOf ex:University0 .
+ex:College2 lubm:subOrganizationOf ex:University1 .
+
+ex:Department0 a lubm:Department .
+ex:Department1 a lubm:Department .
+ex:Department2 a lubm:Department .
+ex:Department3 a lubm:Department .
+
+# Professors -- infer Faculty and therefore Person 
+ex:Alice a lubm:Professor .
+ex:Bob a lubm:Professor .
+ex:Carol a lubm:Professor .
+ex:Dan a lubm:Professor .
+ex:Eve a lubm:Professor .
+
+# Can infer Organization via rdfs:range
+ex:Alice lubm:worksFor ex:Department2 .
+ex:Carol lubm:worksFor ex:Department0 .
+ex:Dan lubm:worksFor ex:Department2 .
+ex:Eve lubm:worksFor ex:Department1 .
+
+ex:Alice lubm:headOf ex:Department1 . # infer Chair and worksFor
+ex:Dan lubm:headOf ex:Department2 . # infer Chair, already have worksFor
+ex:Eve lubm:headOf ex:ResearchGroup3 . # infer worksFor, therefore 
Organization, but not Chair because not a Department
\ No newline at end of file

Reply via email to