http://git-wip-us.apache.org/repos/asf/jena/blob/92fb810a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/characteristics/CharacteristicSetReducerTest.java
----------------------------------------------------------------------
diff --git a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/characteristics/CharacteristicSetReducerTest.java b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/characteristics/CharacteristicSetReducerTest.java
new file mode 100644
index 0000000..30da730
--- /dev/null
+++ b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/characteristics/CharacteristicSetReducerTest.java
@@ -0,0 +1,192 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.mapreduce.characteristics;
+
+import java.io.IOException;
+import java.util.List;
+
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mapreduce.Reducer;
+import org.apache.hadoop.mrunit.mapreduce.MapReduceDriver;
+import org.apache.hadoop.mrunit.types.Pair;
+import org.apache.jena.hadoop.rdf.mapreduce.AbstractMapReduceTests;
+import org.apache.jena.hadoop.rdf.mapreduce.characteristics.CharacteristicSetReducer;
+import org.apache.jena.hadoop.rdf.types.CharacteristicSetWritable;
+import org.apache.jena.hadoop.rdf.types.CharacteristicWritable;
+import org.junit.Assert;
+import org.junit.Test;
+
+import com.hp.hpl.jena.graph.NodeFactory;
+
+/**
+ * Tests for the {@link CharacteristicSetReducer}
+ * 
+ * 
+ */
+public class CharacteristicSetReducerTest
+        extends
+        AbstractMapReduceTests<CharacteristicSetWritable, CharacteristicSetWritable, CharacteristicSetWritable, CharacteristicSetWritable, CharacteristicSetWritable, NullWritable> {
+
+    @Override
+    protected final Mapper<CharacteristicSetWritable, CharacteristicSetWritable, CharacteristicSetWritable, CharacteristicSetWritable> getMapperInstance() {
+        // Identity mapper
+        return new Mapper<CharacteristicSetWritable, CharacteristicSetWritable, CharacteristicSetWritable, CharacteristicSetWritable>();
+    }
+
+    @Override
+    protected final Reducer<CharacteristicSetWritable, CharacteristicSetWritable, CharacteristicSetWritable, NullWritable> getReducerInstance() {
+        return new CharacteristicSetReducer();
+    }
+
+    /**
+     * Creates a set consisting of the given predicates
+     * 
+     * @param predicates
+     *            Predicates
+     * @return Set
+     */
+    protected CharacteristicSetWritable createSet(
+            MapReduceDriver<CharacteristicSetWritable, CharacteristicSetWritable, CharacteristicSetWritable, CharacteristicSetWritable, CharacteristicSetWritable, NullWritable> driver,
+            int inputOccurrences, int outputOccurrences, String... predicates) {
+        CharacteristicSetWritable set = new CharacteristicSetWritable();
+        for (String predicateUri : predicates) {
+            set.add(new CharacteristicWritable(NodeFactory.createURI(predicateUri)));
+        }
+        for (int i = 1; i <= inputOccurrences; i++) {
+            driver.addInput(set, set);
+        }
+        for (int i = 1; i <= outputOccurrences; i++) {
+            driver.addOutput(set, NullWritable.get());
+        }
+        return set;
+    }
+
+    /**
+     * Test characteristic set reduction
+     * 
+     * @throws IOException
+     */
+    @Test
+    public void characteristic_set_reducer_01() throws IOException {
+        MapReduceDriver<CharacteristicSetWritable, CharacteristicSetWritable, CharacteristicSetWritable, CharacteristicSetWritable, CharacteristicSetWritable, NullWritable> driver = this
+                .getMapReduceDriver();
+
+        this.createSet(driver, 1, 1, "http://predicate");
+
+        driver.runTest(false);
+    }
+
+    /**
+     * Test characteristic set reduction
+     * 
+     * @throws IOException
+     */
+    @Test
+    public void characteristic_set_reducer_02() throws IOException {
+        MapReduceDriver<CharacteristicSetWritable, CharacteristicSetWritable, CharacteristicSetWritable, CharacteristicSetWritable, CharacteristicSetWritable, NullWritable> driver = this
+                .getMapReduceDriver();
+
+        this.createSet(driver, 2, 1, "http://predicate");
+
+        driver.runTest(false);
+
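+        // Run the job a second time to capture its output: the two identical input sets should have been merged into a single set whose count is 2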
+        List<Pair<CharacteristicSetWritable, NullWritable>> results = driver.run();
+        CharacteristicSetWritable cw = results.get(0).getFirst();
+        Assert.assertEquals(2, cw.getCount().get());
+    }
+
+    /**
+     * Test characteristic set reduction
+     * 
+     * @throws IOException
+     */
+    @Test
+    public void characteristic_set_reducer_03() throws IOException {
+        MapReduceDriver<CharacteristicSetWritable, CharacteristicSetWritable, CharacteristicSetWritable, CharacteristicSetWritable, CharacteristicSetWritable, NullWritable> driver = this
+                .getMapReduceDriver();
+
+        this.createSet(driver, 1, 1, "http://predicate");
+        this.createSet(driver, 1, 1, "http://other");
+
+        driver.runTest(false);
+    }
+
+    /**
+     * Test characteristic set reduction
+     * 
+     * @throws IOException
+     */
+    @Test
+    public void characteristic_set_reducer_04() throws IOException {
+        MapReduceDriver<CharacteristicSetWritable, CharacteristicSetWritable, CharacteristicSetWritable, CharacteristicSetWritable, CharacteristicSetWritable, NullWritable> driver = this
+                .getMapReduceDriver();
+
+        this.createSet(driver, 2, 1, "http://predicate");
+        this.createSet(driver, 1, 1, "http://other");
+
+        driver.runTest(false);
+
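+        // The set for http://predicate was supplied twice as input, so after reduction its count should be 2 while the other set's count stays at 1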
+        List<Pair<CharacteristicSetWritable, NullWritable>> results = driver.run();
+        for (Pair<CharacteristicSetWritable, NullWritable> pair : results) {
+            CharacteristicSetWritable cw = pair.getFirst();
+            boolean expectTwo = cw.getCharacteristics().next().getNode().get().hasURI("http://predicate");
+            Assert.assertEquals(expectTwo ? 2 : 1, cw.getCount().get());
+        }
+    }
+
+    /**
+     * Test characteristic set reduction
+     * 
+     * @throws IOException
+     */
+    @Test
+    public void characteristic_set_reducer_05() throws IOException {
+        MapReduceDriver<CharacteristicSetWritable, CharacteristicSetWritable, CharacteristicSetWritable, CharacteristicSetWritable, CharacteristicSetWritable, NullWritable> driver = this
+                .getMapReduceDriver();
+
+        this.createSet(driver, 1, 1, "http://predicate", "http://other");
+        this.createSet(driver, 1, 1, "http://other");
+
+        driver.runTest(false);
+    }
+
+    /**
+     * Test characteristic set reduction
+     * 
+     * @throws IOException
+     */
+    @Test
+    public void characteristic_set_reducer_06() throws IOException {
+        MapReduceDriver<CharacteristicSetWritable, CharacteristicSetWritable, CharacteristicSetWritable, CharacteristicSetWritable, CharacteristicSetWritable, NullWritable> driver = this
+                .getMapReduceDriver();
+
+        this.createSet(driver, 2, 1, "http://predicate", "http://other");
+        this.createSet(driver, 1, 1, "http://other");
+
+        driver.runTest(false);
+
+        List<Pair<CharacteristicSetWritable, NullWritable>> results = driver.run();
+        for (Pair<CharacteristicSetWritable, NullWritable> pair : results) {
+            CharacteristicSetWritable cw = pair.getFirst();
+            boolean expectTwo = cw.hasCharacteristic("http://predicate");
+            Assert.assertEquals(expectTwo ? 2 : 1, cw.getCount().get());
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/jena/blob/92fb810a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/characteristics/TripleCharacteristicSetGeneratingReducerTest.java
----------------------------------------------------------------------
diff --git a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/characteristics/TripleCharacteristicSetGeneratingReducerTest.java b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/characteristics/TripleCharacteristicSetGeneratingReducerTest.java
new file mode 100644
index 0000000..e647b68
--- /dev/null
+++ b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/characteristics/TripleCharacteristicSetGeneratingReducerTest.java
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.mapreduce.characteristics;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mapreduce.Reducer;
+import org.apache.jena.hadoop.rdf.mapreduce.characteristics.TripleCharacteristicSetGeneratingReducer;
+import org.apache.jena.hadoop.rdf.mapreduce.group.TripleGroupBySubjectMapper;
+import org.apache.jena.hadoop.rdf.types.CharacteristicSetWritable;
+import org.apache.jena.hadoop.rdf.types.NodeWritable;
+import org.apache.jena.hadoop.rdf.types.TripleWritable;
+
+import com.hp.hpl.jena.datatypes.xsd.XSDDatatype;
+import com.hp.hpl.jena.graph.NodeFactory;
+import com.hp.hpl.jena.graph.Triple;
+
+/**
+ * Tests for the {@link TripleCharacteristicSetGeneratingReducer}
+ * 
+ * 
+ * 
+ */
+public class TripleCharacteristicSetGeneratingReducerTest extends AbstractCharacteristicSetGeneratingReducerTests<Triple, TripleWritable> {
+
+    @Override
+    protected Mapper<LongWritable, TripleWritable, NodeWritable, TripleWritable> getMapperInstance() {
+        return new TripleGroupBySubjectMapper<LongWritable>();
+    }
+
+    @Override
+    protected Reducer<NodeWritable, TripleWritable, CharacteristicSetWritable, NullWritable> getReducerInstance() {
+        return new TripleCharacteristicSetGeneratingReducer();
+    }
+
+    @Override
+    protected TripleWritable createTuple(int i, String predicateUri) {
+        return new TripleWritable(new Triple(NodeFactory.createURI("http://subjects/" + i), NodeFactory.createURI(predicateUri),
+                NodeFactory.createLiteral(Integer.toString(i), XSDDatatype.XSDinteger)));
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/jena/blob/92fb810a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/count/AbstractNodeTupleNodeCountReducedTests.java
----------------------------------------------------------------------
diff --git a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/count/AbstractNodeTupleNodeCountReducedTests.java b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/count/AbstractNodeTupleNodeCountReducedTests.java
new file mode 100644
index 0000000..ebdbcde
--- /dev/null
+++ b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/count/AbstractNodeTupleNodeCountReducedTests.java
@@ -0,0 +1,149 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.mapreduce.count;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.mrunit.mapreduce.MapReduceDriver;
+import org.apache.jena.hadoop.rdf.mapreduce.AbstractMapReduceTests;
+import org.apache.jena.hadoop.rdf.mapreduce.count.AbstractNodeTupleNodeCountMapper;
+import org.apache.jena.hadoop.rdf.types.AbstractNodeTupleWritable;
+import org.apache.jena.hadoop.rdf.types.NodeWritable;
+import org.junit.Test;
+
+
+/**
+ * Abstract tests for mappers derived from
+ * {@link AbstractNodeTupleNodeCountMapper} used in conjunction with a node
+ * count reducer
+ * 
+ * 
+ * 
+ * @param <TValue>
+ *            Tuple type
+ * @param <T>
+ *            Writable tuple type
+ */
+public abstract class AbstractNodeTupleNodeCountReducedTests<TValue, T extends AbstractNodeTupleWritable<TValue>> extends
+        AbstractMapReduceTests<LongWritable, T, NodeWritable, LongWritable, NodeWritable, LongWritable> {
+
+    /**
+     * Generates tuples for the tests
+     * 
+     * @param driver
+     *            Driver
+     * @param num
+     *            Number of tuples to generate
+     */
+    protected void generateData(MapReduceDriver<LongWritable, T, NodeWritable, LongWritable, NodeWritable, LongWritable> driver, int num) {
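+        // Tally how often each node occurs across all generated tuples so the expected reducer output can be declared up front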
+        Map<NodeWritable, Long> counts = new HashMap<NodeWritable, Long>();
+        for (int i = 0; i < num; i++) {
+            LongWritable key = new LongWritable(i);
+            T value = this.createValue(i);
+            NodeWritable[] nodes = this.getNodes(value);
+
+            driver.addInput(key, value);
+            for (NodeWritable n : nodes) {
+                if (counts.containsKey(n)) {
+                    counts.put(n, counts.get(n) + 1);
+                } else {
+                    counts.put(n, 1L);
+                }
+            }
+        }
+        
+        for (Entry<NodeWritable, Long> kvp : counts.entrySet()) {
+            driver.addOutput(kvp.getKey(), new LongWritable(kvp.getValue()));
+        }
+    }
+
+    /**
+     * Creates a tuple value
+     * 
+     * @param i
+     *            Index
+     * @return Tuple value
+     */
+    protected abstract T createValue(int i);
+
+    /**
+     * Splits the tuple value into its constituent nodes
+     * 
+     * @param tuple
+     *            Tuple value
+     * @return Nodes
+     */
+    protected abstract NodeWritable[] getNodes(T tuple);
+
+    /**
+     * Runs a node count test
+     * 
+     * @param num
+     *            Number of tuples to generate
+     * @throws IOException
+     */
+    protected void testNodeCount(int num) throws IOException {
+        MapReduceDriver<LongWritable, T, NodeWritable, LongWritable, NodeWritable, LongWritable> driver = this.getMapReduceDriver();
+        this.generateData(driver, num);
+        driver.runTest(false);
+    }
+
+    /**
+     * Tests node counting
+     * 
+     * @throws IOException
+     */
+    @Test
+    public void node_count_01() throws IOException {
+        this.testNodeCount(1);
+    }
+
+    /**
+     * Tests node counting
+     * 
+     * @throws IOException
+     */
+    @Test
+    public void node_count_02() throws IOException {
+        this.testNodeCount(100);
+    }
+
+    /**
+     * Tests node counting
+     * 
+     * @throws IOException
+     */
+    @Test
+    public void node_count_03() throws IOException {
+        this.testNodeCount(1000);
+    }
+
+    /**
+     * Tests node counting
+     * 
+     * @throws IOException
+     */
+    @Test
+    public void node_count_04() throws IOException {
+        this.testNodeCount(2500);
+    }
+}

http://git-wip-us.apache.org/repos/asf/jena/blob/92fb810a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/count/AbstractNodeTupleNodeCountTests.java
----------------------------------------------------------------------
diff --git a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/count/AbstractNodeTupleNodeCountTests.java b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/count/AbstractNodeTupleNodeCountTests.java
new file mode 100644
index 0000000..e589b3a
--- /dev/null
+++ b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/count/AbstractNodeTupleNodeCountTests.java
@@ -0,0 +1,138 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.mapreduce.count;
+
+import java.io.IOException;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.mrunit.mapreduce.MapDriver;
+import org.apache.jena.hadoop.rdf.mapreduce.AbstractMapperTests;
+import org.apache.jena.hadoop.rdf.mapreduce.count.AbstractNodeTupleNodeCountMapper;
+import org.apache.jena.hadoop.rdf.types.AbstractNodeTupleWritable;
+import org.apache.jena.hadoop.rdf.types.NodeWritable;
+import org.junit.Test;
+
+
+/**
+ * Abstract tests for mappers derived from
+ * {@link AbstractNodeTupleNodeCountMapper}
+ * 
+ * 
+ * 
+ * @param <TValue>
+ *            Tuple type
+ * @param <T>
+ *            Writable tuple type
+ */
+public abstract class AbstractNodeTupleNodeCountTests<TValue, T extends AbstractNodeTupleWritable<TValue>> extends
+        AbstractMapperTests<LongWritable, T, NodeWritable, LongWritable> {
+
+    /**
+     * Generates tuples for the tests
+     * 
+     * @param driver
+     *            Driver
+     * @param num
+     *            Number of tuples to generate
+     */
+    protected void generateData(MapDriver<LongWritable, T, NodeWritable, LongWritable> driver, int num) {
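+        // At the map stage every node is emitted with a count of 1; aggregation only happens in the reducer, so each expected output pair is (node, 1)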
+        LongWritable expectedCount = new LongWritable(1);
+        for (int i = 0; i < num; i++) {
+            LongWritable key = new LongWritable(i);
+            T value = this.createValue(i);
+            NodeWritable[] nodes = this.getNodes(value);
+
+            driver.addInput(key, value);
+            for (NodeWritable n : nodes) {
+                driver.addOutput(n, expectedCount);
+            }
+        }
+    }
+
+    /**
+     * Creates a tuple value
+     * 
+     * @param i
+     *            Index
+     * @return Tuple value
+     */
+    protected abstract T createValue(int i);
+
+    /**
+     * Splits the tuple value into its constituent nodes
+     * 
+     * @param tuple
+     *            Tuple value
+     * @return Nodes
+     */
+    protected abstract NodeWritable[] getNodes(T tuple);
+
+    /**
+     * Runs a node count test
+     * 
+     * @param num
+     *            Number of tuples to generate
+     * @throws IOException
+     */
+    protected void testNodeCount(int num) throws IOException {
+        MapDriver<LongWritable, T, NodeWritable, LongWritable> driver = this.getMapDriver();
+        this.generateData(driver, num);
+        driver.runTest();
+    }
+
+    /**
+     * Tests node counting
+     * 
+     * @throws IOException
+     */
+    @Test
+    public void node_count_01() throws IOException {
+        this.testNodeCount(1);
+    }
+
+    /**
+     * Tests node counting
+     * 
+     * @throws IOException
+     */
+    @Test
+    public void node_count_02() throws IOException {
+        this.testNodeCount(100);
+    }
+
+    /**
+     * Tests node counting
+     * 
+     * @throws IOException
+     */
+    @Test
+    public void node_count_03() throws IOException {
+        this.testNodeCount(1000);
+    }
+
+    /**
+     * Tests node counting
+     * 
+     * @throws IOException
+     */
+    @Test
+    public void node_count_04() throws IOException {
+        this.testNodeCount(2500);
+    }
+}

http://git-wip-us.apache.org/repos/asf/jena/blob/92fb810a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/count/QuadNodeCountMapReduceTest.java
----------------------------------------------------------------------
diff --git a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/count/QuadNodeCountMapReduceTest.java b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/count/QuadNodeCountMapReduceTest.java
new file mode 100644
index 0000000..b453bee
--- /dev/null
+++ b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/count/QuadNodeCountMapReduceTest.java
@@ -0,0 +1,67 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.mapreduce.count;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mapreduce.Reducer;
+import org.apache.jena.hadoop.rdf.mapreduce.count.NodeCountReducer;
+import org.apache.jena.hadoop.rdf.mapreduce.count.QuadNodeCountMapper;
+import org.apache.jena.hadoop.rdf.mapreduce.count.TripleNodeCountMapper;
+import org.apache.jena.hadoop.rdf.types.NodeWritable;
+import org.apache.jena.hadoop.rdf.types.QuadWritable;
+
+import com.hp.hpl.jena.datatypes.xsd.XSDDatatype;
+import com.hp.hpl.jena.graph.NodeFactory;
+import com.hp.hpl.jena.graph.Triple;
+import com.hp.hpl.jena.sparql.core.Quad;
+
+/**
+ * Tests for the {@link QuadNodeCountMapper} used in conjunction with the
+ * {@link NodeCountReducer}
+ * 
+ * 
+ * 
+ */
+public class QuadNodeCountMapReduceTest extends AbstractNodeTupleNodeCountReducedTests<Quad, QuadWritable> {
+
+    @Override
+    protected Mapper<LongWritable, QuadWritable, NodeWritable, LongWritable> getMapperInstance() {
+        return new QuadNodeCountMapper<LongWritable>();
+    }
+
+    @Override
+    protected Reducer<NodeWritable, LongWritable, NodeWritable, LongWritable> getReducerInstance() {
+        return new NodeCountReducer();
+    }
+
+    @Override
+    protected QuadWritable createValue(int i) {
+        return new QuadWritable(new Quad(Quad.defaultGraphNodeGenerated, new Triple(
+                NodeFactory.createURI("http://subjects/" + i), NodeFactory.createURI("http://predicate"),
+                NodeFactory.createLiteral(Integer.toString(i), XSDDatatype.XSDinteger))));
+    }
+
+    @Override
+    protected NodeWritable[] getNodes(QuadWritable tuple) {
+        Quad q = tuple.get();
+        return new NodeWritable[] { new NodeWritable(q.getGraph()), new NodeWritable(q.getSubject()),
+                new NodeWritable(q.getPredicate()), new NodeWritable(q.getObject()) };
+    }
+}

http://git-wip-us.apache.org/repos/asf/jena/blob/92fb810a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/count/QuadNodeCountMapperTest.java
----------------------------------------------------------------------
diff --git a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/count/QuadNodeCountMapperTest.java b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/count/QuadNodeCountMapperTest.java
new file mode 100644
index 0000000..869fc06
--- /dev/null
+++ b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/count/QuadNodeCountMapperTest.java
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.mapreduce.count;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.jena.hadoop.rdf.mapreduce.count.QuadNodeCountMapper;
+import org.apache.jena.hadoop.rdf.types.NodeWritable;
+import org.apache.jena.hadoop.rdf.types.QuadWritable;
+
+import com.hp.hpl.jena.datatypes.xsd.XSDDatatype;
+import com.hp.hpl.jena.graph.NodeFactory;
+import com.hp.hpl.jena.graph.Triple;
+import com.hp.hpl.jena.sparql.core.Quad;
+
+/**
+ * Tests for the {@link QuadNodeCountMapper}
+ * 
+ * 
+ * 
+ */
+public class QuadNodeCountMapperTest extends AbstractNodeTupleNodeCountTests<Quad, QuadWritable> {
+
+    @Override
+    protected Mapper<LongWritable, QuadWritable, NodeWritable, LongWritable> getInstance() {
+        return new QuadNodeCountMapper<LongWritable>();
+    }
+
+    @Override
+    protected QuadWritable createValue(int i) {
+        return new QuadWritable(new Quad(Quad.defaultGraphNodeGenerated, new Triple(
+                NodeFactory.createURI("http://subjects/" + i), NodeFactory.createURI("http://predicate"),
+                NodeFactory.createLiteral(Integer.toString(i), XSDDatatype.XSDinteger))));
+    }
+
+    @Override
+    protected NodeWritable[] getNodes(QuadWritable tuple) {
+        Quad q = tuple.get();
+        return new NodeWritable[] { new NodeWritable(q.getGraph()), new NodeWritable(q.getSubject()),
+                new NodeWritable(q.getPredicate()), new NodeWritable(q.getObject()) };
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/jena/blob/92fb810a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/count/TripleNodeCountMapReduceTest.java
----------------------------------------------------------------------
diff --git a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/count/TripleNodeCountMapReduceTest.java b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/count/TripleNodeCountMapReduceTest.java
new file mode 100644
index 0000000..660cfe7
--- /dev/null
+++ b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/count/TripleNodeCountMapReduceTest.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.mapreduce.count;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mapreduce.Reducer;
+import org.apache.jena.hadoop.rdf.mapreduce.count.NodeCountReducer;
+import org.apache.jena.hadoop.rdf.mapreduce.count.TripleNodeCountMapper;
+import org.apache.jena.hadoop.rdf.types.NodeWritable;
+import org.apache.jena.hadoop.rdf.types.TripleWritable;
+
+import com.hp.hpl.jena.datatypes.xsd.XSDDatatype;
+import com.hp.hpl.jena.graph.NodeFactory;
+import com.hp.hpl.jena.graph.Triple;
+
+/**
+ * Tests for the {@link TripleNodeCountMapper} used in conjunction with the
+ * {@link NodeCountReducer}
+ * 
+ * 
+ * 
+ */
+public class TripleNodeCountMapReduceTest extends AbstractNodeTupleNodeCountReducedTests<Triple, TripleWritable> {
+
+    @Override
+    protected Mapper<LongWritable, TripleWritable, NodeWritable, LongWritable> getMapperInstance() {
+        return new TripleNodeCountMapper<LongWritable>();
+    }
+    
+
+    @Override
+    protected Reducer<NodeWritable, LongWritable, NodeWritable, LongWritable> getReducerInstance() {
+        return new NodeCountReducer();
+    }
+
+    @Override
+    protected TripleWritable createValue(int i) {
+        return new TripleWritable(
+                new Triple(NodeFactory.createURI("http://subjects/" + i), NodeFactory.createURI("http://predicate"),
+                        NodeFactory.createLiteral(Integer.toString(i), XSDDatatype.XSDinteger)));
+    }
+
+    @Override
+    protected NodeWritable[] getNodes(TripleWritable tuple) {
+        Triple t = tuple.get();
+        return new NodeWritable[] { new NodeWritable(t.getSubject()), new NodeWritable(t.getPredicate()),
+                new NodeWritable(t.getObject()) };
+    }
+}

http://git-wip-us.apache.org/repos/asf/jena/blob/92fb810a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/count/TripleNodeCountMapperTest.java
----------------------------------------------------------------------
diff --git a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/count/TripleNodeCountMapperTest.java b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/count/TripleNodeCountMapperTest.java
new file mode 100644
index 0000000..fdac378
--- /dev/null
+++ b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/count/TripleNodeCountMapperTest.java
@@ -0,0 +1,58 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.mapreduce.count;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.jena.hadoop.rdf.mapreduce.count.TripleNodeCountMapper;
+import org.apache.jena.hadoop.rdf.types.NodeWritable;
+import org.apache.jena.hadoop.rdf.types.TripleWritable;
+
+import com.hp.hpl.jena.datatypes.xsd.XSDDatatype;
+import com.hp.hpl.jena.graph.NodeFactory;
+import com.hp.hpl.jena.graph.Triple;
+
+/**
+ * Tests for the {@link TripleNodeCountMapper}
+ * 
+ * 
+ * 
+ */
+public class TripleNodeCountMapperTest extends AbstractNodeTupleNodeCountTests<Triple, TripleWritable> {
+
+    @Override
+    protected Mapper<LongWritable, TripleWritable, NodeWritable, LongWritable> getInstance() {
+        return new TripleNodeCountMapper<LongWritable>();
+    }
+
+    @Override
+    protected TripleWritable createValue(int i) {
+        return new TripleWritable(
+                new Triple(NodeFactory.createURI("http://subjects/" + i), NodeFactory.createURI("http://predicate"),
+                        NodeFactory.createLiteral(Integer.toString(i), XSDDatatype.XSDinteger)));
+    }
+
+    @Override
+    protected NodeWritable[] getNodes(TripleWritable tuple) {
+        Triple t = tuple.get();
+        return new NodeWritable[] { new NodeWritable(t.getSubject()), new NodeWritable(t.getPredicate()),
+                new NodeWritable(t.getObject()) };
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/jena/blob/92fb810a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/filter/AbstractNodeTupleFilterTests.java
----------------------------------------------------------------------
diff --git a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/filter/AbstractNodeTupleFilterTests.java b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/filter/AbstractNodeTupleFilterTests.java
new file mode 100644
index 0000000..23c5661
--- /dev/null
+++ b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/filter/AbstractNodeTupleFilterTests.java
@@ -0,0 +1,146 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.mapreduce.filter;
+
+import java.io.IOException;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.mrunit.mapreduce.MapDriver;
+import org.apache.jena.hadoop.rdf.mapreduce.AbstractMapperTests;
+import org.apache.jena.hadoop.rdf.mapreduce.filter.AbstractNodeTupleFilterMapper;
+import org.apache.jena.hadoop.rdf.types.AbstractNodeTupleWritable;
+import org.junit.Test;
+
+
+/**
+ * Abstract tests for {@link AbstractNodeTupleFilterMapper} implementations
+ * which filter based on the validity of tuples
+ * 
+ * 
+ * 
+ * @param <TValue>
+ *            Tuple type
+ * @param <T>
+ *            Writable tuple type
+ */
+public abstract class AbstractNodeTupleFilterTests<TValue, T extends AbstractNodeTupleWritable<TValue>> extends
+        AbstractMapperTests<LongWritable, T, LongWritable, T> {
+
+    protected final void generateData(MapDriver<LongWritable, T, LongWritable, T> driver, int num) {
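+        // Even-numbered keys receive valid tuples and odd-numbered keys invalid ones; the expected outputs flip when the filter runs in inverted mode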
+        for (int i = 0; i < num; i++) {
+            LongWritable key = new LongWritable(i);
+            if (i % 2 == 0 && !this.noValidInputs()) {
+                T value = this.createValidValue(i);
+                driver.addInput(key, value);
+                if (!this.isInverted())
+                    driver.addOutput(key, value);
+            } else {
+                T value = this.createInvalidValue(i);
+                driver.addInput(key, value);
+                if (this.isInverted())
+                    driver.addOutput(key, value);
+            }
+        }
+    }
+
+    /**
+     * Method that may be overridden for testing filters where all the generated
+     * data will be rejected as invalid
+     * 
+     * @return True if there are no valid inputs, false otherwise (default)
+     */
+    protected boolean noValidInputs() {
+        return false;
+    }
+
+    /**
+     * Method that may be overridden for testing filters with inverted mode
+     * enabled i.e. where normally valid input is considered invalid and vice
+     * versa
+     * 
+     * @return True if inverted, false otherwise (default)
+     */
+    protected boolean isInverted() {
+        return false;
+    }
+
+    /**
+     * Creates an invalid value
+     * 
+     * @param i
+     *            Key
+     * @return Invalid value
+     */
+    protected abstract T createInvalidValue(int i);
+
+    /**
+     * Creates a valid value
+     * 
+     * @param i
+     *            Key
+     * @return Valid value
+     */
+    protected abstract T createValidValue(int i);
+
+    protected final void testFilterValid(int num) throws IOException {
+        MapDriver<LongWritable, T, LongWritable, T> driver = this.getMapDriver();
+        this.generateData(driver, num);
+        driver.runTest();
+    }
+
+    /**
+     * Test filtering of valid and invalid tuples
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void filter_valid_01() throws IOException {
+        this.testFilterValid(1);
+    }
+
+    /**
+     * Test filtering of valid and invalid tuples
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void filter_valid_02() throws IOException {
+        this.testFilterValid(100);
+    }
+
+    /**
+     * Test filtering of valid and invalid tuples
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void filter_valid_03() throws IOException {
+        this.testFilterValid(1000);
+    }
+
+    /**
+     * Test filtering of valid and invalid tuples
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void filter_valid_04() throws IOException {
+        this.testFilterValid(2500);
+    }
+}

http://git-wip-us.apache.org/repos/asf/jena/blob/92fb810a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/filter/AbstractQuadValidityFilterTests.java
----------------------------------------------------------------------
diff --git a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/filter/AbstractQuadValidityFilterTests.java b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/filter/AbstractQuadValidityFilterTests.java
new file mode 100644
index 0000000..9458f83
--- /dev/null
+++ b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/filter/AbstractQuadValidityFilterTests.java
@@ -0,0 +1,86 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.mapreduce.filter;
+
+import org.apache.jena.hadoop.rdf.types.QuadWritable;
+
+import com.hp.hpl.jena.datatypes.xsd.XSDDatatype;
+import com.hp.hpl.jena.graph.NodeFactory;
+import com.hp.hpl.jena.sparql.core.Quad;
+
+/**
+ * Abstract tests for quad filter mappers that check quad validity
+ * 
+ * 
+ * 
+ */
+public abstract class AbstractQuadValidityFilterTests extends AbstractNodeTupleFilterTests<Quad, QuadWritable> {
+
+    @Override
+    protected QuadWritable createValidValue(int i) {
+        return new QuadWritable(
+                new Quad(Quad.defaultGraphNodeGenerated, NodeFactory.createURI("http://subjects/" + i),
+                        NodeFactory.createURI("http://predicate"), NodeFactory.createLiteral(Integer.toString(i),
+                                XSDDatatype.XSDinteger)));
+    }
+
+    @Override
+    protected QuadWritable createInvalidValue(int i) {
+        switch (i % 8) {
+        case 0:
+            // Invalid to use Literal as Graph
+            return new QuadWritable(new Quad(NodeFactory.createLiteral("invalid"), NodeFactory.createURI("http://subjects/" + i),
+                    NodeFactory.createURI("http://predicate"), NodeFactory.createLiteral(Integer.toString(i),
+                            XSDDatatype.XSDinteger)));
+        case 1:
+            // Invalid to use Variable as Graph
+            return new QuadWritable(new Quad(NodeFactory.createVariable("invalid"),
+                    NodeFactory.createURI("http://subjects/" + i), NodeFactory.createURI("http://predicate"),
+                    NodeFactory.createLiteral(Integer.toString(i), XSDDatatype.XSDinteger)));
+        case 2:
+            // Invalid to use Literal as Subject
+            return new QuadWritable(new Quad(Quad.defaultGraphNodeGenerated, NodeFactory.createLiteral("invalid"),
+                    NodeFactory.createURI("http://predicate"), NodeFactory.createLiteral(Integer.toString(i),
+                            XSDDatatype.XSDinteger)));
+        case 3:
+            // Invalid to use Variable as Subject
+            return new QuadWritable(new Quad(Quad.defaultGraphNodeGenerated, NodeFactory.createVariable("invalid"),
+                    NodeFactory.createURI("http://predicate"), NodeFactory.createLiteral(Integer.toString(i),
+                            XSDDatatype.XSDinteger)));
+        case 4:
+            // Invalid to use Blank Node as Predicate
+            return new QuadWritable(new Quad(Quad.defaultGraphNodeGenerated, NodeFactory.createURI("http://subjects/" + i),
+                    NodeFactory.createAnon(), NodeFactory.createLiteral(Integer.toString(i), XSDDatatype.XSDinteger)));
+        case 5:
+            // Invalid to use Literal as Predicate
+            return new QuadWritable(new Quad(Quad.defaultGraphNodeGenerated, NodeFactory.createURI("http://subjects/" + i),
+                    NodeFactory.createLiteral("invalid"), NodeFactory.createLiteral(Integer.toString(i), XSDDatatype.XSDinteger)));
+        case 6:
+            // Invalid to use Variable as Predicate
+            return new QuadWritable(
+                    new Quad(Quad.defaultGraphNodeGenerated, NodeFactory.createURI("http://subjects/" + i),
+                            NodeFactory.createVariable("invalid"), NodeFactory.createLiteral(Integer.toString(i),
+                                    XSDDatatype.XSDinteger)));
+        default:
+            // Invalid to use Variable as Object
+            return new QuadWritable(new Quad(Quad.defaultGraphNodeGenerated, NodeFactory.createURI("http://subjects/" + i),
+                    NodeFactory.createURI("http://predicate"), NodeFactory.createVariable("invalid")));
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/jena/blob/92fb810a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/filter/AbstractTripleValidityFilterTests.java
----------------------------------------------------------------------
diff --git a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/filter/AbstractTripleValidityFilterTests.java b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/filter/AbstractTripleValidityFilterTests.java
new file mode 100644
index 0000000..3c41710
--- /dev/null
+++ b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/filter/AbstractTripleValidityFilterTests.java
@@ -0,0 +1,73 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.mapreduce.filter;
+
+import org.apache.jena.hadoop.rdf.types.TripleWritable;
+
+import com.hp.hpl.jena.datatypes.xsd.XSDDatatype;
+import com.hp.hpl.jena.graph.NodeFactory;
+import com.hp.hpl.jena.graph.Triple;
+
+/**
+ * Abstract tests for triple filter mappers that check triple validity
+ * 
+ * 
+ * 
+ */
+public abstract class AbstractTripleValidityFilterTests extends AbstractNodeTupleFilterTests<Triple, TripleWritable> {
+
+    @Override
+    protected TripleWritable createValidValue(int i) {
+        return new TripleWritable(
+                new Triple(NodeFactory.createURI("http://subjects/" + i), NodeFactory.createURI("http://predicate"),
+                        NodeFactory.createLiteral(Integer.toString(i), XSDDatatype.XSDinteger)));
+    }
+
+    @Override
+    protected TripleWritable createInvalidValue(int i) {
+        switch (i % 6) {
+        case 0:
+            // Invalid to use Literal as Subject
+            return new TripleWritable(new Triple(NodeFactory.createLiteral("invalid"), NodeFactory.createURI("http://predicate"),
+                    NodeFactory.createLiteral(Integer.toString(i), XSDDatatype.XSDinteger)));
+        case 1:
+            // Invalid to use Variable as Subject
+            return new TripleWritable(new Triple(NodeFactory.createVariable("invalid"),
+                    NodeFactory.createURI("http://predicate"), NodeFactory.createLiteral(Integer.toString(i),
+                            XSDDatatype.XSDinteger)));
+        case 2:
+            // Invalid to use Blank Node as Predicate
+            return new TripleWritable(new Triple(NodeFactory.createURI("http://subjects/" + i), NodeFactory.createAnon(),
+                    NodeFactory.createLiteral(Integer.toString(i), XSDDatatype.XSDinteger)));
+        case 3:
+            // Invalid to use Literal as Predicate
+            return new TripleWritable(new Triple(NodeFactory.createURI("http://subjects/" + i),
+                    NodeFactory.createLiteral("invalid"), NodeFactory.createLiteral(Integer.toString(i), XSDDatatype.XSDinteger)));
+        case 4:
+            // Invalid to use Variable as Predicate
+            return new TripleWritable(
+                    new Triple(NodeFactory.createURI("http://subjects/" + i), NodeFactory.createVariable("invalid"),
+                            NodeFactory.createLiteral(Integer.toString(i), XSDDatatype.XSDinteger)));
+        default:
+            // Invalid to use Variable as Object
+            return new TripleWritable(new Triple(NodeFactory.createURI("http://subjects/" + i),
+                    NodeFactory.createURI("http://predicate"), NodeFactory.createVariable("invalid")));
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/jena/blob/92fb810a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/filter/TripleFilterByNoPredicateMapperTest.java
----------------------------------------------------------------------
diff --git a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/filter/TripleFilterByNoPredicateMapperTest.java b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/filter/TripleFilterByNoPredicateMapperTest.java
new file mode 100644
index 0000000..4c9fb5a
--- /dev/null
+++ b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/filter/TripleFilterByNoPredicateMapperTest.java
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.mapreduce.filter;
+
+import org.apache.jena.hadoop.rdf.mapreduce.filter.positional.TripleFilterByPredicateUriMapper;
+
+/**
+ * Tests for the {@link TripleFilterByPredicateUriMapper} where there are no
+ * predicates and thus all data must be invalid
+ * 
+ * 
+ * 
+ */
+public class TripleFilterByNoPredicateMapperTest extends TripleFilterByPredicateMapperTest {
+
+    private static final String[] EMPTY_PREDICATE_POOL = new String[0];
+
+    /**
+     * Gets the pool of predicates considered valid
+     * 
+     * @return Predicate pool
+     */
+    @Override
+    protected String[] getPredicatePool() {
+        return EMPTY_PREDICATE_POOL;
+    }
+
+    @Override
+    protected boolean noValidInputs() {
+        return true;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/jena/blob/92fb810a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/filter/TripleFilterByPredicateMapperTest.java
----------------------------------------------------------------------
diff --git a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/filter/TripleFilterByPredicateMapperTest.java b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/filter/TripleFilterByPredicateMapperTest.java
new file mode 100644
index 0000000..add363e
--- /dev/null
+++ b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/filter/TripleFilterByPredicateMapperTest.java
@@ -0,0 +1,80 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.mapreduce.filter;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mrunit.mapreduce.MapDriver;
+import org.apache.jena.hadoop.rdf.mapreduce.RdfMapReduceConstants;
+import org.apache.jena.hadoop.rdf.mapreduce.filter.positional.TripleFilterByPredicateUriMapper;
+import org.apache.jena.hadoop.rdf.types.TripleWritable;
+
+import com.hp.hpl.jena.datatypes.xsd.XSDDatatype;
+import com.hp.hpl.jena.graph.NodeFactory;
+import com.hp.hpl.jena.graph.Triple;
+import com.hp.hpl.jena.vocabulary.RDF;
+import com.hp.hpl.jena.vocabulary.RDFS;
+
+/**
+ * Tests for the {@link TripleFilterByPredicateUriMapper}
+ * 
+ * 
+ * 
+ */
+public class TripleFilterByPredicateMapperTest extends AbstractNodeTupleFilterTests<Triple, TripleWritable> {
+
+    private static final String[] DEFAULT_PREDICATE_POOL = new String[] { RDF.type.getURI(), RDFS.range.getURI(),
+            RDFS.domain.getURI() };
+
+    @Override
+    protected Mapper<LongWritable, TripleWritable, LongWritable, TripleWritable> getInstance() {
+        return new TripleFilterByPredicateUriMapper<LongWritable>();
+    }
+
+    @Override
+    protected void configureDriver(MapDriver<LongWritable, TripleWritable, LongWritable, TripleWritable> driver) {
+        super.configureDriver(driver);
+        driver.getContext().getConfiguration().setStrings(RdfMapReduceConstants.FILTER_PREDICATE_URIS, this.getPredicatePool());
+    }
+
+    /**
+     * Gets the pool of predicates considered valid
+     * 
+     * @return Predicate pool
+     */
+    protected String[] getPredicatePool() {
+        return DEFAULT_PREDICATE_POOL;
+    }
+
+    @Override
+    protected TripleWritable createInvalidValue(int i) {
+        return new TripleWritable(
+                new Triple(NodeFactory.createURI("http://subjects/" + i), NodeFactory.createURI("http://predicate"),
+                        NodeFactory.createLiteral(Integer.toString(i), XSDDatatype.XSDinteger)));
+    }
+
+    @Override
+    protected TripleWritable createValidValue(int i) {
+        String[] predicates = this.getPredicatePool();
+        if (predicates.length == 0) return this.createInvalidValue(i);
+        return new TripleWritable(new Triple(NodeFactory.createURI("http://subjects/" + i),
+                NodeFactory.createURI(predicates[i % predicates.length]),
+                NodeFactory.createLiteral(Integer.toString(i), XSDDatatype.XSDinteger)));
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/jena/blob/92fb810a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/filter/TripleInvertedFilterByNoPredicateMapperTest.java
----------------------------------------------------------------------
diff --git a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/filter/TripleInvertedFilterByNoPredicateMapperTest.java b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/filter/TripleInvertedFilterByNoPredicateMapperTest.java
new file mode 100644
index 0000000..fb7dd25
--- /dev/null
+++ 
b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/filter/TripleInvertedFilterByNoPredicateMapperTest.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.mapreduce.filter;
+
+import org.apache.jena.hadoop.rdf.mapreduce.filter.positional.TripleFilterByPredicateUriMapper;
+
+/**
+ * Tests for the {@link TripleFilterByPredicateUriMapper} where there are no
+ * predicates and thus all data must be invalid
+ * 
+ * 
+ * 
+ */
+public class TripleInvertedFilterByNoPredicateMapperTest extends TripleInvertedFilterByPredicateMapperTest {
+
+    private static final String[] EMPTY_PREDICATE_POOL = new String[0];
+
+    /**
+     * Gets the pool of predicates considered valid
+     * 
+     * @return Predicate pool
+     */
+    @Override
+    protected String[] getPredicatePool() {
+        return EMPTY_PREDICATE_POOL;
+    }
+
+    @Override
+    protected boolean noValidInputs() {
+        return true;
+    }
+    
+    @Override
+    protected boolean isInverted() {
+        return true;
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/jena/blob/92fb810a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/filter/TripleInvertedFilterByPredicateMapperTest.java
----------------------------------------------------------------------
diff --git 
a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/filter/TripleInvertedFilterByPredicateMapperTest.java
 
b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/filter/TripleInvertedFilterByPredicateMapperTest.java
new file mode 100644
index 0000000..b24e9c2
--- /dev/null
+++ 
b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/filter/TripleInvertedFilterByPredicateMapperTest.java
@@ -0,0 +1,87 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.mapreduce.filter;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mrunit.mapreduce.MapDriver;
+import org.apache.jena.hadoop.rdf.mapreduce.RdfMapReduceConstants;
+import org.apache.jena.hadoop.rdf.mapreduce.filter.positional.TripleFilterByPredicateUriMapper;
+import org.apache.jena.hadoop.rdf.types.TripleWritable;
+
+import com.hp.hpl.jena.datatypes.xsd.XSDDatatype;
+import com.hp.hpl.jena.graph.NodeFactory;
+import com.hp.hpl.jena.graph.Triple;
+import com.hp.hpl.jena.vocabulary.RDF;
+import com.hp.hpl.jena.vocabulary.RDFS;
+
+/**
+ * Tests for the {@link TripleFilterByPredicateUriMapper} run in inverted mode
+ * 
+ * 
+ * 
+ */
+public class TripleInvertedFilterByPredicateMapperTest extends AbstractNodeTupleFilterTests<Triple, TripleWritable> {
+
+    private static final String[] DEFAULT_PREDICATE_POOL = new String[] { RDF.type.getURI(), RDFS.range.getURI(),
+            RDFS.domain.getURI() };
+
+    @Override
+    protected Mapper<LongWritable, TripleWritable, LongWritable, TripleWritable> getInstance() {
+        return new TripleFilterByPredicateUriMapper<LongWritable>();
+    }
+
+    @Override
+    protected void configureDriver(MapDriver<LongWritable, TripleWritable, LongWritable, TripleWritable> driver) {
+        super.configureDriver(driver);
+        driver.getContext().getConfiguration().setStrings(RdfMapReduceConstants.FILTER_PREDICATE_URIS,
+                this.getPredicatePool());
+        driver.getContext().getConfiguration().setBoolean(RdfMapReduceConstants.FILTER_INVERT, true);
+    }
+
+    @Override
+    protected boolean isInverted() {
+        return true;
+    }
+
+    /**
+     * Gets the pool of predicates considered valid
+     * 
+     * @return Predicate pool
+     */
+    protected String[] getPredicatePool() {
+        return DEFAULT_PREDICATE_POOL;
+    }
+
+    @Override
+    protected TripleWritable createInvalidValue(int i) {
+        return new TripleWritable(
+                new Triple(NodeFactory.createURI("http://subjects/" + i), NodeFactory.createURI("http://predicate"),
+                        NodeFactory.createLiteral(Integer.toString(i), XSDDatatype.XSDinteger)));
+    }
+
+    @Override
+    protected TripleWritable createValidValue(int i) {
+        String[] predicates = this.getPredicatePool();
+        if (predicates.length == 0)
+            return this.createInvalidValue(i);
+        return new TripleWritable(new Triple(NodeFactory.createURI("http://subjects/" + i),
+                NodeFactory.createURI(predicates[i % predicates.length]),
+                NodeFactory.createLiteral(Integer.toString(i), XSDDatatype.XSDinteger)));
+    }
+
+}
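
[Editor's note] The only behavioural difference from the non-inverted test above is the FILTER_INVERT flag set in configureDriver(). A minimal, hypothetical helper (not part of this commit; the class name is illustrative) that builds the equivalent standalone configuration would be:

// Hypothetical sketch: same predicate pool as the accepting filter, but
// inverted so that matching triples are rejected rather than kept.
import org.apache.hadoop.conf.Configuration;
import org.apache.jena.hadoop.rdf.mapreduce.RdfMapReduceConstants;

import com.hp.hpl.jena.vocabulary.RDF;
import com.hp.hpl.jena.vocabulary.RDFS;

public class InvertedPredicateFilterConfigSketch {

    public static Configuration create() {
        Configuration config = new Configuration();
        // Predicates that would normally be accepted...
        config.setStrings(RdfMapReduceConstants.FILTER_PREDICATE_URIS,
                RDF.type.getURI(), RDFS.range.getURI(), RDFS.domain.getURI());
        // ...are instead filtered out once the invert flag is set
        config.setBoolean(RdfMapReduceConstants.FILTER_INVERT, true);
        return config;
    }
}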

http://git-wip-us.apache.org/repos/asf/jena/blob/92fb810a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/filter/ValidQuadFilterMapperTest.java
----------------------------------------------------------------------
diff --git 
a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/filter/ValidQuadFilterMapperTest.java
 
b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/filter/ValidQuadFilterMapperTest.java
new file mode 100644
index 0000000..33b4bd1
--- /dev/null
+++ 
b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/filter/ValidQuadFilterMapperTest.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.mapreduce.filter;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.jena.hadoop.rdf.mapreduce.filter.ValidQuadFilterMapper;
+import org.apache.jena.hadoop.rdf.types.QuadWritable;
+
+
+/**
+ * Tests for the {@link ValidQuadFilterMapper}
+ * 
+ * 
+ * 
+ */
+public class ValidQuadFilterMapperTest extends AbstractQuadValidityFilterTests {
+
+    @Override
+    protected Mapper<LongWritable, QuadWritable, LongWritable, QuadWritable> getInstance() {
+        return new ValidQuadFilterMapper<LongWritable>();
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/jena/blob/92fb810a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/filter/ValidTripleFilterMapperTest.java
----------------------------------------------------------------------
diff --git 
a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/filter/ValidTripleFilterMapperTest.java
 
b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/filter/ValidTripleFilterMapperTest.java
new file mode 100644
index 0000000..9de6395
--- /dev/null
+++ 
b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/filter/ValidTripleFilterMapperTest.java
@@ -0,0 +1,40 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.mapreduce.filter;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.jena.hadoop.rdf.mapreduce.filter.ValidTripleFilterMapper;
+import org.apache.jena.hadoop.rdf.types.TripleWritable;
+
+
+/**
+ * Tests for the {@link ValidTripleFilterMapper}
+ * 
+ * 
+ * 
+ */
+public class ValidTripleFilterMapperTest extends AbstractTripleValidityFilterTests {
+
+    @Override
+    protected Mapper<LongWritable, TripleWritable, LongWritable, TripleWritable> getInstance() {
+        return new ValidTripleFilterMapper<LongWritable>();
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/jena/blob/92fb810a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/group/AbstractNodeTupleGroupingTests.java
----------------------------------------------------------------------
diff --git 
a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/group/AbstractNodeTupleGroupingTests.java
 
b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/group/AbstractNodeTupleGroupingTests.java
new file mode 100644
index 0000000..1e362d1
--- /dev/null
+++ 
b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/group/AbstractNodeTupleGroupingTests.java
@@ -0,0 +1,114 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.mapreduce.group;
+
+import java.io.IOException;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.mrunit.mapreduce.MapDriver;
+import org.apache.jena.hadoop.rdf.mapreduce.AbstractMapperTests;
+import org.apache.jena.hadoop.rdf.types.AbstractNodeTupleWritable;
+import org.apache.jena.hadoop.rdf.types.NodeWritable;
+import org.junit.Test;
+
+
+/**
+ * Abstract tests for node tuple grouping mapper implementations
+ * 
+ * 
+ * 
+ * @param <TValue>
+ *            Tuple type
+ * @param <T>
+ *            Writable tuple type
+ */
+public abstract class AbstractNodeTupleGroupingTests<TValue, T extends AbstractNodeTupleWritable<TValue>> extends
+        AbstractMapperTests<LongWritable, T, NodeWritable, T> {
+
+    /**
+     * Generates data for use in tests
+     * 
+     * @param driver
+     *            Driver
+     * @param num
+     *            Number of tuples to generate
+     */
+    protected void generateData(MapDriver<LongWritable, T, NodeWritable, T> driver, int num) {
+        for (int i = 0; i < num; i++) {
+            LongWritable inputKey = new LongWritable(i);
+            T value = this.createValue(i);
+            NodeWritable outputKey = this.getOutputKey(value);
+
+            driver.addInput(inputKey, value);
+            driver.addOutput(outputKey, value);
+        }
+    }
+
+    protected abstract T createValue(int i);
+
+    protected abstract NodeWritable getOutputKey(T tuple);
+
+    protected final void testGrouping(int num) throws IOException {
+        MapDriver<LongWritable, T, NodeWritable, T> driver = this.getMapDriver();
+        this.generateData(driver, num);
+        driver.runTest();
+    }
+
+    /**
+     * Test grouping tuples by nodes
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void grouping_01() throws IOException {
+        this.testGrouping(1);
+    }
+    
+    /**
+     * Test grouping tuples by nodes
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void grouping_02() throws IOException {
+        this.testGrouping(100);
+    }
+    
+    /**
+     * Test grouping tuples by nodes
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void grouping_03() throws IOException {
+        this.testGrouping(1000);
+    }
+    
+    /**
+     * Test grouping tuples by nodes
+     * 
+     * @throws IOException
+     */
+    @Test
+    public final void grouping_04() throws IOException {
+        this.testGrouping(2500);
+    }
+}

http://git-wip-us.apache.org/repos/asf/jena/blob/92fb810a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/group/AbstractQuadGroupingTests.java
----------------------------------------------------------------------
diff --git 
a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/group/AbstractQuadGroupingTests.java
 
b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/group/AbstractQuadGroupingTests.java
new file mode 100644
index 0000000..562512f
--- /dev/null
+++ 
b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/group/AbstractQuadGroupingTests.java
@@ -0,0 +1,43 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.mapreduce.group;
+
+import org.apache.jena.hadoop.rdf.mapreduce.group.AbstractQuadGroupingMapper;
+import org.apache.jena.hadoop.rdf.types.QuadWritable;
+
+import com.hp.hpl.jena.datatypes.xsd.XSDDatatype;
+import com.hp.hpl.jena.graph.NodeFactory;
+import com.hp.hpl.jena.graph.Triple;
+import com.hp.hpl.jena.sparql.core.Quad;
+
+/**
+ * Abstract tests for {@link AbstractQuadGroupingMapper} implementations
+ * 
+ * 
+ * 
+ */
+public abstract class AbstractQuadGroupingTests extends AbstractNodeTupleGroupingTests<Quad, QuadWritable> {
+
+    @Override
+    protected QuadWritable createValue(int i) {
+        return new QuadWritable(new Quad(Quad.defaultGraphNodeGenerated, new Triple(
+                NodeFactory.createURI("http://subjects/" + i), NodeFactory.createURI("http://predicate"),
+                NodeFactory.createLiteral(Integer.toString(i), XSDDatatype.XSDinteger))));
+    }
+}

http://git-wip-us.apache.org/repos/asf/jena/blob/92fb810a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/group/AbstractTripleGroupingTests.java
----------------------------------------------------------------------
diff --git 
a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/group/AbstractTripleGroupingTests.java
 
b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/group/AbstractTripleGroupingTests.java
new file mode 100644
index 0000000..8e3d33c
--- /dev/null
+++ 
b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/group/AbstractTripleGroupingTests.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.mapreduce.group;
+
+import org.apache.jena.hadoop.rdf.mapreduce.group.AbstractTripleGroupingMapper;
+import org.apache.jena.hadoop.rdf.types.TripleWritable;
+
+import com.hp.hpl.jena.datatypes.xsd.XSDDatatype;
+import com.hp.hpl.jena.graph.NodeFactory;
+import com.hp.hpl.jena.graph.Triple;
+
+/**
+ * Abstract tests for {@link AbstractTripleGroupingMapper} implementations
+ * 
+ *
+ */
+public abstract class AbstractTripleGroupingTests extends AbstractNodeTupleGroupingTests<Triple, TripleWritable> {
+
+    @Override
+    protected TripleWritable createValue(int i) {
+        return new TripleWritable(
+                new Triple(NodeFactory.createURI("http://subjects/" + i), NodeFactory.createURI("http://predicate"),
+                        NodeFactory.createLiteral(Integer.toString(i), XSDDatatype.XSDinteger)));
+    }
+}

http://git-wip-us.apache.org/repos/asf/jena/blob/92fb810a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/group/QuadGroupByGraphMapperTest.java
----------------------------------------------------------------------
diff --git 
a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/group/QuadGroupByGraphMapperTest.java
 
b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/group/QuadGroupByGraphMapperTest.java
new file mode 100644
index 0000000..370f820
--- /dev/null
+++ 
b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/group/QuadGroupByGraphMapperTest.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.mapreduce.group;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.jena.hadoop.rdf.mapreduce.group.QuadGroupByGraphMapper;
+import org.apache.jena.hadoop.rdf.types.NodeWritable;
+import org.apache.jena.hadoop.rdf.types.QuadWritable;
+
+
+/**
+ * Tests for the {@link QuadGroupByGraphMapper}
+ * 
+ * 
+ * 
+ */
+public class QuadGroupByGraphMapperTest extends AbstractQuadGroupingTests {
+
+    @Override
+    protected NodeWritable getOutputKey(QuadWritable tuple) {
+        return new NodeWritable(tuple.get().getGraph());
+    }
+
+    @Override
+    protected Mapper<LongWritable, QuadWritable, NodeWritable, QuadWritable> getInstance() {
+        return new QuadGroupByGraphMapper<LongWritable>();
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/jena/blob/92fb810a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/group/QuadGroupByObjectMapperTest.java
----------------------------------------------------------------------
diff --git 
a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/group/QuadGroupByObjectMapperTest.java
 
b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/group/QuadGroupByObjectMapperTest.java
new file mode 100644
index 0000000..919696d
--- /dev/null
+++ 
b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/group/QuadGroupByObjectMapperTest.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.mapreduce.group;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.jena.hadoop.rdf.mapreduce.group.QuadGroupByObjectMapper;
+import org.apache.jena.hadoop.rdf.types.NodeWritable;
+import org.apache.jena.hadoop.rdf.types.QuadWritable;
+
+
+/**
+ * Tests for the {@link QuadGroupByObjectMapper}
+ * 
+ * 
+ * 
+ */
+public class QuadGroupByObjectMapperTest extends AbstractQuadGroupingTests {
+
+    @Override
+    protected NodeWritable getOutputKey(QuadWritable tuple) {
+        return new NodeWritable(tuple.get().getObject());
+    }
+
+    @Override
+    protected Mapper<LongWritable, QuadWritable, NodeWritable, QuadWritable> getInstance() {
+        return new QuadGroupByObjectMapper<LongWritable>();
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/jena/blob/92fb810a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/group/QuadGroupByPredicateMapperTest.java
----------------------------------------------------------------------
diff --git 
a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/group/QuadGroupByPredicateMapperTest.java
 
b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/group/QuadGroupByPredicateMapperTest.java
new file mode 100644
index 0000000..2a1b520
--- /dev/null
+++ 
b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/group/QuadGroupByPredicateMapperTest.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.mapreduce.group;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.jena.hadoop.rdf.mapreduce.group.QuadGroupByPredicateMapper;
+import org.apache.jena.hadoop.rdf.types.NodeWritable;
+import org.apache.jena.hadoop.rdf.types.QuadWritable;
+
+
+/**
+ * Tests for the {@link QuadGroupByPredicateMapper}
+ * 
+ * 
+ * 
+ */
+public class QuadGroupByPredicateMapperTest extends AbstractQuadGroupingTests {
+
+    @Override
+    protected NodeWritable getOutputKey(QuadWritable tuple) {
+        return new NodeWritable(tuple.get().getPredicate());
+    }
+
+    @Override
+    protected Mapper<LongWritable, QuadWritable, NodeWritable, QuadWritable> getInstance() {
+        return new QuadGroupByPredicateMapper<LongWritable>();
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/jena/blob/92fb810a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/group/QuadGroupBySubjectMapperTest.java
----------------------------------------------------------------------
diff --git 
a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/group/QuadGroupBySubjectMapperTest.java
 
b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/group/QuadGroupBySubjectMapperTest.java
new file mode 100644
index 0000000..3b0bb1a
--- /dev/null
+++ 
b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/group/QuadGroupBySubjectMapperTest.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.mapreduce.group;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.jena.hadoop.rdf.mapreduce.group.QuadGroupBySubjectMapper;
+import org.apache.jena.hadoop.rdf.types.NodeWritable;
+import org.apache.jena.hadoop.rdf.types.QuadWritable;
+
+
+/**
+ * Tests for the {@link QuadGroupBySubjectMapper}
+ * 
+ * 
+ * 
+ */
+public class QuadGroupBySubjectMapperTest extends AbstractQuadGroupingTests {
+
+    @Override
+    protected NodeWritable getOutputKey(QuadWritable tuple) {
+        return new NodeWritable(tuple.get().getSubject());
+    }
+
+    @Override
+    protected Mapper<LongWritable, QuadWritable, NodeWritable, QuadWritable> getInstance() {
+        return new QuadGroupBySubjectMapper<LongWritable>();
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/jena/blob/92fb810a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/group/TripleGroupByObjectMapperTest.java
----------------------------------------------------------------------
diff --git 
a/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/group/TripleGroupByObjectMapperTest.java
 
b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/group/TripleGroupByObjectMapperTest.java
new file mode 100644
index 0000000..c769bb4
--- /dev/null
+++ 
b/jena-hadoop-rdf/hadoop-rdf-mapreduce/src/test/java/org/apache/jena/hadoop/rdf/mapreduce/group/TripleGroupByObjectMapperTest.java
@@ -0,0 +1,46 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *     
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.jena.hadoop.rdf.mapreduce.group;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.jena.hadoop.rdf.mapreduce.group.TripleGroupByObjectMapper;
+import org.apache.jena.hadoop.rdf.types.NodeWritable;
+import org.apache.jena.hadoop.rdf.types.TripleWritable;
+
+
+/**
+ * Tests for the {@link TripleGroupByObjectMapper}
+ * 
+ * 
+ * 
+ */
+public class TripleGroupByObjectMapperTest extends AbstractTripleGroupingTests {
+
+    @Override
+    protected NodeWritable getOutputKey(TripleWritable tuple) {
+        return new NodeWritable(tuple.get().getObject());
+    }
+
+    @Override
+    protected Mapper<LongWritable, TripleWritable, NodeWritable, TripleWritable> getInstance() {
+        return new TripleGroupByObjectMapper<LongWritable>();
+    }
+
+}
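
[Editor's note] For readers unfamiliar with MRUnit, the abstract grouping harness in these files reduces to the pattern below. This is a minimal, hypothetical sketch (the class name GroupingSketchTest is illustrative, not part of this commit) that drives TripleGroupByObjectMapper directly and expects each triple to be re-emitted keyed by its object node, exactly as generateData()/testGrouping() assert in bulk:

// Hypothetical standalone MRUnit test, mirroring the abstract grouping harness.
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.mrunit.mapreduce.MapDriver;
import org.apache.jena.hadoop.rdf.mapreduce.group.TripleGroupByObjectMapper;
import org.apache.jena.hadoop.rdf.types.NodeWritable;
import org.apache.jena.hadoop.rdf.types.TripleWritable;
import org.junit.Test;

import com.hp.hpl.jena.datatypes.xsd.XSDDatatype;
import com.hp.hpl.jena.graph.NodeFactory;
import com.hp.hpl.jena.graph.Triple;

public class GroupingSketchTest {

    @Test
    public void groupByObject() throws Exception {
        MapDriver<LongWritable, TripleWritable, NodeWritable, TripleWritable> driver =
                MapDriver.newMapDriver(new TripleGroupByObjectMapper<LongWritable>());

        Triple t = new Triple(NodeFactory.createURI("http://subjects/1"),
                NodeFactory.createURI("http://predicate"),
                NodeFactory.createLiteral("1", XSDDatatype.XSDinteger));

        driver.addInput(new LongWritable(1), new TripleWritable(t));
        // The mapper should emit the tuple unchanged, keyed by its object node
        driver.addOutput(new NodeWritable(t.getObject()), new TripleWritable(t));
        driver.runTest();
    }
}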
