TINKERPOP-1930 Remove Giraph

Project: http://git-wip-us.apache.org/repos/asf/tinkerpop/repo
Commit: http://git-wip-us.apache.org/repos/asf/tinkerpop/commit/33bfe547
Tree: http://git-wip-us.apache.org/repos/asf/tinkerpop/tree/33bfe547
Diff: http://git-wip-us.apache.org/repos/asf/tinkerpop/diff/33bfe547

Branch: refs/heads/tp33
Commit: 33bfe547b59e06a4902aae7c3970236429d13b7b
Parents: 23ca117
Author: Stephen Mallette <sp...@genoprime.com>
Authored: Fri Mar 23 08:20:05 2018 -0400
Committer: Stephen Mallette <sp...@genoprime.com>
Committed: Mon Mar 26 09:23:35 2018 -0400

----------------------------------------------------------------------
 CHANGELOG.asciidoc                              |   1 +
 docs/preprocessor/install-plugins.sh            |   2 +-
 docs/preprocessor/preprocess-file.sh            |   7 +-
 docs/site/home/gremlin.html                     |   3 +-
 docs/site/home/index.html                       |   1 -
 docs/site/home/providers.html                   |  12 +-
 docs/src/dev/provider/index.asciidoc            |   4 +-
 .../src/reference/gremlin-applications.asciidoc |   9 -
 .../reference/implementations-giraph.asciidoc   | 145 ---------
 .../implementations-hadoop-end.asciidoc         | 144 +--------
 .../implementations-hadoop-start.asciidoc       |  42 +--
 docs/src/reference/index.asciidoc               |   1 -
 .../tutorials/getting-started/index.asciidoc    |  14 +-
 docs/src/upgrade/release-3.4.x.asciidoc         |   8 +
 giraph-gremlin/pom.xml                          | 258 ----------------
 giraph-gremlin/src/assembly/hadoop-job.xml      |  41 ---
 giraph-gremlin/src/assembly/standalone.xml      |  50 ---
 .../giraph/jsr223/GiraphGremlinPlugin.java      |  58 ----
 .../giraph/process/computer/EmptyOutEdges.java  |  80 -----
 .../process/computer/GiraphComputation.java     |  47 ---
 .../process/computer/GiraphGraphComputer.java   | 303 -------------------
 .../giraph/process/computer/GiraphMemory.java   | 198 ------------
 .../process/computer/GiraphMessageCombiner.java |  61 ----
 .../process/computer/GiraphMessenger.java       |  90 ------
 .../giraph/process/computer/GiraphVertex.java   |  39 ---
 .../process/computer/GiraphWorkerContext.java   |  78 -----
 .../process/computer/MemoryAggregator.java      |  68 -----
 .../process/computer/PassThroughMemory.java     |  99 ------
 .../structure/io/GiraphVertexInputFormat.java   |  58 ----
 .../structure/io/GiraphVertexOutputFormat.java  |  55 ----
 .../giraph/structure/io/GiraphVertexReader.java |  68 -----
 .../giraph/structure/io/GiraphVertexWriter.java |  71 -----
 ...pache.tinkerpop.gremlin.jsr223.GremlinPlugin |   1 -
 .../giraph/GiraphGremlinIntegrateTest.java      |  33 --
 .../gremlin/giraph/GiraphGremlinSuite.java      |  35 ---
 .../GiraphHadoopGremlinIntegrateTest.java       |  33 --
 ...GiraphGraphComputerProcessIntegrateTest.java |  32 --
 .../computer/GiraphHadoopGraphProvider.java     |  68 -----
 .../structure/io/GiraphIoRegistryCheck.java     |  64 ----
 .../src/test/resources/giraph-site.xml          |  12 -
 .../src/test/resources/log4j-silent.properties  |  23 --
 .../src/test/resources/log4j-test.properties    |  23 --
 .../tinkerpop/gremlin/jsr223/GremlinPlugin.java |   4 +-
 hadoop-gremlin/conf/hadoop-graphson.properties  |   6 -
 .../conf/hadoop-grateful-gryo.properties        |  12 -
 hadoop-gremlin/conf/hadoop-gryo.properties      |  16 -
 hadoop-gremlin/conf/hadoop-script.properties    |  16 -
 .../tinkerpop/gremlin/hadoop/Constants.java     |   1 -
 .../gremlin/hadoop/structure/HadoopGraph.java   |  17 +-
 pom.xml                                         |  12 +-
 50 files changed, 40 insertions(+), 2483 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/tinkerpop/blob/33bfe547/CHANGELOG.asciidoc
----------------------------------------------------------------------
diff --git a/CHANGELOG.asciidoc b/CHANGELOG.asciidoc
index 8ef7bc0..e9c29c9 100644
--- a/CHANGELOG.asciidoc
+++ b/CHANGELOG.asciidoc
@@ -28,6 +28,7 @@ This release also includes changes from <<release-3-3-2, 
3.3.2>>.
 * Change the `toString()` of `Path` to be standardized as other graph elements 
are.
 * Fixed a bug in `ReducingBarrierStep`, that returned the provided seed value 
despite no elements being available.
 * Changed the order of `select()` scopes. The order is now: maps, 
side-effects, paths.
+* Removed support for Giraph.
 
 == TinkerPop 3.3.0 (Gremlin Symphony #40 in G Minor)
 

http://git-wip-us.apache.org/repos/asf/tinkerpop/blob/33bfe547/docs/preprocessor/install-plugins.sh
----------------------------------------------------------------------
diff --git a/docs/preprocessor/install-plugins.sh 
b/docs/preprocessor/install-plugins.sh
index 990dbe4..0a7ca31 100755
--- a/docs/preprocessor/install-plugins.sh
+++ b/docs/preprocessor/install-plugins.sh
@@ -25,7 +25,7 @@ TMP_DIR=$3
 INSTALL_TEMPLATE="docs/preprocessor/install-plugins.groovy"
 INSTALL_FILE="${TMP_DIR}/install-plugins.groovy"
 
-plugins=("hadoop-gremlin" "spark-gremlin" "giraph-gremlin" "neo4j-gremlin")
+plugins=("hadoop-gremlin" "spark-gremlin" "neo4j-gremlin")
 # plugins=()
 pluginsCount=${#plugins[@]}
 

http://git-wip-us.apache.org/repos/asf/tinkerpop/blob/33bfe547/docs/preprocessor/preprocess-file.sh
----------------------------------------------------------------------
diff --git a/docs/preprocessor/preprocess-file.sh 
b/docs/preprocessor/preprocess-file.sh
index d5076f1..ca1f205 100755
--- a/docs/preprocessor/preprocess-file.sh
+++ b/docs/preprocessor/preprocess-file.sh
@@ -110,7 +110,7 @@ if [ ! ${SKIP} ] && [ $(grep -c '^\[gremlin' ${input}) -gt 
0 ]; then
       mv ext/spark-gremlin .ext/
       cat ext/plugins.txt | tee .ext/plugins.all | grep -Fv 
'SparkGremlinPlugin' > .ext/plugins.txt
       ;;
-    "implementations-hadoop-start" | "implementations-hadoop-end" | 
"implementations-spark" | "implementations-giraph" | "olap-spark-yarn")
+    "implementations-hadoop-start" | "implementations-hadoop-end" | 
"implementations-spark" | "olap-spark-yarn")
       # deactivate Neo4j plugin to prevent version conflicts between 
TinkerPop's Spark jars and Neo4j's Spark jars
       mkdir .ext
       mv ext/neo4j-gremlin .ext/
@@ -121,9 +121,8 @@ if [ ! ${SKIP} ] && [ $(grep -c '^\[gremlin' ${input}) -gt 
0 ]; then
       mkdir .ext
       mv ext/neo4j-gremlin .ext/
       mv ext/spark-gremlin .ext/
-      mv ext/giraph-gremlin .ext/
       mv ext/hadoop-gremlin .ext/
-      cat ext/plugins.txt | tee .ext/plugins.all | grep -v 
'Neo4jGremlinPlugin\|SparkGremlinPlugin\|GiraphGremlinPlugin\|HadoopGremlinPlugin'
 > .ext/plugins.txt
+      cat ext/plugins.txt | tee .ext/plugins.all | grep -v 
'Neo4jGremlinPlugin\|SparkGremlinPlugin\|HadoopGremlinPlugin' > .ext/plugins.txt
       ;;
   esac
 
@@ -135,7 +134,7 @@ if [ ! ${SKIP} ] && [ $(grep -c '^\[gremlin' ${input}) -gt 
0 ]; then
   awk -f ${AWK_SCRIPTS}/prepare.awk |
   awk -f ${AWK_SCRIPTS}/init-code-blocks.awk -v TP_HOME="${TP_HOME}" -v 
PYTHONPATH="${TP_HOME}/gremlin-python/target/classes/Lib" |
   awk -f ${AWK_SCRIPTS}/progressbar.awk -v 
tpl=${AWK_SCRIPTS}/progressbar.groovy.template |
-  
HADOOP_GREMLIN_LIBS="${CONSOLE_HOME}/ext/giraph-gremlin/lib:${CONSOLE_HOME}/ext/tinkergraph-gremlin/lib"
 bin/gremlin.sh |
+  HADOOP_GREMLIN_LIBS="${CONSOLE_HOME}/ext/tinkergraph-gremlin/lib" 
bin/gremlin.sh |
   ${lb} awk -f ${AWK_SCRIPTS}/ignore.awk   |
   ${lb} awk -f ${AWK_SCRIPTS}/prettify.awk |
   ${lb} awk -f ${AWK_SCRIPTS}/cleanup.awk  |

http://git-wip-us.apache.org/repos/asf/tinkerpop/blob/33bfe547/docs/site/home/gremlin.html
----------------------------------------------------------------------
diff --git a/docs/site/home/gremlin.html b/docs/site/home/gremlin.html
index bedddf3..6524650 100644
--- a/docs/site/home/gremlin.html
+++ b/docs/site/home/gremlin.html
@@ -339,8 +339,7 @@ g.V().hasLabel("person").
 GraphTraversalSource g;
 g = graph.traversal();                                                         
// local OLTP
 g = graph.traversal().withRemote(DriverRemoteConnection.using("server.yaml"))  
// remote OLTP
-g = graph.traversal().withComputer(SparkGraphComputer.class);                  
// distributed OLAP
-g = graph.traversal().withComputer(GiraphGraphComputer.class);                 
// distributed OLAP</code>
+g = graph.traversal().withComputer(SparkGraphComputer.class);                  
// distributed OLAP</code>
 </pre>
        </div>
        <br/>

http://git-wip-us.apache.org/repos/asf/tinkerpop/blob/33bfe547/docs/site/home/index.html
----------------------------------------------------------------------
diff --git a/docs/site/home/index.html b/docs/site/home/index.html
index 5b76cc3..e45b227 100644
--- a/docs/site/home/index.html
+++ b/docs/site/home/index.html
@@ -222,7 +222,6 @@ limitations under the License.
             <li><a 
href="https://github.com/MartinHaeusler/chronos/tree/master/org.chronos.chronograph";>ChronoGraph</a>
 - A versioned graph database.</li>
             <li><a 
href="http://www.datastax.com/products/datastax-enterprise-graph";>DSEGraph</a> 
- DataStax graph database with OLTP and OLAP support.</li>
             <li><a href="https://grakn.ai/";>GRAKN.AI</a> - Distributed 
OLTP/OLAP knowledge graph system.</li>
-            <li><a 
href="http://tinkerpop.apache.org/docs/current/reference/#giraphgraphcomputer";>Hadoop
 (Giraph)</a> - OLAP graph processor using Giraph.</li>
             <li><a 
href="http://tinkerpop.apache.org/docs/current/reference/#sparkgraphcomputer";>Hadoop
 (Spark)</a> - OLAP graph processor using Spark.</li>
             <li><a href="https://github.com/rayokota/hgraphdb";>HGraphDB</a> - 
OLTP graph database running on Apache HBase.</li>
             <li><a 
href="https://console.ng.bluemix.net/catalog/services/ibm-graph/";>IBM Graph</a> 
- OLTP graph database as a service.</li>

http://git-wip-us.apache.org/repos/asf/tinkerpop/blob/33bfe547/docs/site/home/providers.html
----------------------------------------------------------------------
diff --git a/docs/site/home/providers.html b/docs/site/home/providers.html
index 269b07d..d0bc59a 100644
--- a/docs/site/home/providers.html
+++ b/docs/site/home/providers.html
@@ -75,7 +75,7 @@ limitations under the License.
          <br/>
          <li><strong>The GraphComputer (optional)</strong>: All OLAP-based 
graph processors must implement the primary graph interfaces mentioned above as 
well as a set of parallel-processing
             message-passing interfaces. However, it is possible for a data 
system to only implement the primary graph interfaces and still provide OLAP 
support to their users by integrating their
-            system with an existing <code>GraphComputer</code> implementation 
such as, for example, <code>SparkGraphComputer</code> or 
<code>GiraphGraphComputer</code> (both are provided in Apache's distribution
+            system with an existing <code>GraphComputer</code> implementation 
such as <code>SparkGraphComputer</code> (provided in Apache's 
distribution
             of TinkerPop).
          </li>
       </ol>
@@ -109,11 +109,7 @@ limitations under the License.
             into a Big(Graph)Data processor via the OLAP component of the 
Gremlin traversal machine. Users do not have to learn Spark's data processing 
language as Gremlin traversals execute
             over Spark. For graph system providers, they can boast Spark 
integration once a custom <code>InputRDD</code> (or <code>InputFormat</code>) 
is developed.
          </li>
-         <li><strong>GiraphGraphComputer</strong>: <a 
href="http://giraph.apache.org/";>Apache Giraph</a>&trade; is a Big(Graph)Data 
processor that leverages <a href="http://hadoop.apache.org/";>Apache 
Hadoop</a>&reg; and
-            <a href="http://zookeeper.apache.org/";>Apache ZooKeeper</a>&trade; 
for executing distributed graph algorithms. <a 
href="http://tinkerpop.apache.org/docs/current/reference/#giraphgraphcomputer";><code>GiraphGraphComputer</code></a>
-            supports Gremlin OLAP and allows users to submit Gremlin 
traversals to Giraph for distributed execution. Providers can immediately 
advertise Giraph integration once a custom <code>InputFormat</code> is 
developed.
-         </li>
-         <li><strong>Hadoop support</strong>: <a 
href="http://hadoop.apache.org/";>Apache Hadoop</a>&reg; has become a staple 
technology for Big Data applications. In TinkerPop, both 
<code>SparkGraphComputer</code> and <code>GiraphGraphComputer</code> can pull 
data
+         <li><strong>Hadoop support</strong>: <a 
href="http://hadoop.apache.org/";>Apache Hadoop</a>&reg; has become a staple 
technology for Big Data applications. In TinkerPop, 
<code>SparkGraphComputer</code> can pull data
             from the Hadoop File System (<a 
href="https://hadoop.apache.org/docs/stable/hadoop-project-dist/hadoop-hdfs/HdfsUserGuide.html";>HDFS</a>).
 TinkerPop provides a collection of Input- and OutputFormats for different 
graph serialization standards as well as tooling that makes it easy for users
             to <a 
href="http://tinkerpop.apache.org/docs/current/reference/#interacting-with-hdfs";>interact
 with HDFS</a> from the Gremlin Console or their application.
          </li>
@@ -155,7 +151,7 @@ limitations under the License.
          </div>
          <div class="col-sm-6 col-md-6">
            <a href="http://janusgraph.org/";><img 
src="images/logos/janusgraph-logo.png" 
style="padding-right:20px;float:left;width:35%;"></a>
-           <a href="http://janusgraph.org/";>JanusGraph</a> is an Apache2 
licensed scalable, distributed graph database optimized for storing and 
querying graphs containing hundreds of billions of vertices and edges 
distributed across a multi-machine cluster. JanusGraph is a transactional 
database that can support thousands of concurrent users executing complex 
Gremlin traversals in real time. JanusGraph also provides an in-memory, 
compression-based OLAP processor as well as integrates with Apache TinkerPop's 
Spark/Giraph OLAP processors.
+           <a href="http://janusgraph.org/";>JanusGraph</a> is an Apache2 
licensed scalable, distributed graph database optimized for storing and 
querying graphs containing hundreds of billions of vertices and edges 
distributed across a multi-machine cluster. JanusGraph is a transactional 
database that can support thousands of concurrent users executing complex 
Gremlin traversals in real time. JanusGraph also provides an in-memory, 
compression-based OLAP processor as well as integrates with Apache TinkerPop's 
Spark OLAP processors.
          </div>
       </div>
       <br/>
@@ -188,7 +184,7 @@ limitations under the License.
          </div>
          <div class="col-sm-6 col-md-6">
            <a href="http://titan.thinkaurelius.com/";><img 
src="images/logos/titan-logo.png" 
style="padding-right:20px;float:left;width:35%;"></a>
-           <a href="http://titan.thinkaurelius.com/";>Titan</a>&trade; is an 
Apache2 licensed scalable, distributed graph database optimized for storing and 
querying graphs containing hundreds of billions of vertices and edges 
distributed across a multi-machine cluster. Titan is a transactional database 
that can support thousands of concurrent users executing complex Gremlin 
traversals in real time. Titan also provides an in-memory, compression-based 
OLAP processor as well as integrates with Apache TinkerPop's Spark/Giraph OLAP 
processors.
+           <a href="http://titan.thinkaurelius.com/";>Titan</a>&trade; is an 
Apache2 licensed scalable, distributed graph database optimized for storing and 
querying graphs containing hundreds of billions of vertices and edges 
distributed across a multi-machine cluster. Titan is a transactional database 
that can support thousands of concurrent users executing complex Gremlin 
traversals in real time. Titan also provides an in-memory, compression-based 
OLAP processor as well as integrates with Apache TinkerPop's Spark OLAP 
processors.
          </div>
       </div>
       <br/>
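
For context on the providers-page change above, here is a minimal Gremlin-Groovy sketch of the Spark-only OLAP path that remains: reading a Gryo file through `HadoopGraph` and traversing it with `SparkGraphComputer`. The input location is a placeholder and the property keys mirror the stock `hadoop-gryo.properties` shipped with the distribution.

[source,groovy]
----
// a sketch only: the input path is a placeholder and the keys follow
// conf/hadoop/hadoop-gryo.properties from the distribution
graph = GraphFactory.open([
    'gremlin.graph': 'org.apache.tinkerpop.gremlin.hadoop.structure.HadoopGraph',
    'gremlin.hadoop.graphReader': 'org.apache.tinkerpop.gremlin.hadoop.structure.io.gryo.GryoInputFormat',
    'gremlin.hadoop.graphWriter': 'org.apache.tinkerpop.gremlin.hadoop.structure.io.gryo.GryoOutputFormat',
    'gremlin.hadoop.inputLocation': 'tinkerpop-modern.kryo',   // placeholder HDFS location
    'gremlin.hadoop.outputLocation': 'output'])
g = graph.traversal().withComputer(SparkGraphComputer)
g.V().count()
----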

http://git-wip-us.apache.org/repos/asf/tinkerpop/blob/33bfe547/docs/src/dev/provider/index.asciidoc
----------------------------------------------------------------------
diff --git a/docs/src/dev/provider/index.asciidoc 
b/docs/src/dev/provider/index.asciidoc
index ce4e9f4..d1ef575 100644
--- a/docs/src/dev/provider/index.asciidoc
+++ b/docs/src/dev/provider/index.asciidoc
@@ -99,9 +99,9 @@ implementation is presented below:
 NOTE: The VertexProgram and MapReduce interfaces in the `process/computer/` 
package are not required by the graph
 system. Instead, these are interfaces to be implemented by application 
developers writing VertexPrograms and MapReduce jobs.
 
-IMPORTANT: TinkerPop3 provides three OLAP implementations:
+IMPORTANT: TinkerPop provides two OLAP implementations:
 
link:http://tinkerpop.apache.org/docs/x.y.z/reference/#tinkergraph-gremlin[TinkerGraphComputer]
 (TinkerGraph),
-link:http://tinkerpop.apache.org/docs/x.y.z/reference/#giraphgraphcomputer[GiraphGraphComputer]
 (HadoopGraph), and
+and
 
link:http://tinkerpop.apache.org/docs/x.y.z/reference/#sparkgraphcomputer[SparkGraphComputer]
 (Hadoop).
 Given the complexity of the OLAP system, it is good to study and copy many of 
the patterns used in these reference
 implementations.
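
As a quick illustration of the two remaining reference implementations named above, a minimal Gremlin-Groovy sketch that runs a built-in `VertexProgram` through `TinkerGraphComputer`; it assumes a Gremlin Console session with TinkerGraph available, and `PageRankVertexProgram` stands in for any `VertexProgram`.

[source,groovy]
----
// run a VertexProgram through the in-memory reference GraphComputer; the same
// program(...).submit().get() pattern applies to SparkGraphComputer
graph = TinkerFactory.createModern()
result = graph.compute(TinkerGraphComputer).
               program(PageRankVertexProgram.build().create(graph)).
               submit().get()
result.memory.runtime
----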

http://git-wip-us.apache.org/repos/asf/tinkerpop/blob/33bfe547/docs/src/reference/gremlin-applications.asciidoc
----------------------------------------------------------------------
diff --git a/docs/src/reference/gremlin-applications.asciidoc 
b/docs/src/reference/gremlin-applications.asciidoc
index 53a1642..3f6a822 100644
--- a/docs/src/reference/gremlin-applications.asciidoc
+++ b/docs/src/reference/gremlin-applications.asciidoc
@@ -2147,15 +2147,6 @@ Gephi plugin configuration parameters as accepted via 
the `:remote config` comma
 
 NOTE: This plugin is typically only useful to the Gremlin Console and is 
enabled in the there by default.
 
-[[giraph-plugin]]
-=== Giraph Plugin
-
-image:giraph-logo.png[width=50,float=left]  The Giraph Plugin installs as part 
of `giraph-gremlin` and provides
-a number of imports and utility functions to the environment within which it 
is used. Those classes and functions
-provide the basis for supporting <<graphcomputer,OLAP based traversals>> using 
link:http://giraph.apache.org[Giraph].
-This plugin is defined in greater detail in the 
<<giraphgraphcomputer,GiraphGraphComputer>> section and is typically
-installed in conjuction with the <<hadoop-plugin,Hadoop-Plugin>>.
-
 [[graph-plugins]]
 === Graph Plugins
 

http://git-wip-us.apache.org/repos/asf/tinkerpop/blob/33bfe547/docs/src/reference/implementations-giraph.asciidoc
----------------------------------------------------------------------
diff --git a/docs/src/reference/implementations-giraph.asciidoc 
b/docs/src/reference/implementations-giraph.asciidoc
deleted file mode 100644
index f83903d..0000000
--- a/docs/src/reference/implementations-giraph.asciidoc
+++ /dev/null
@@ -1,145 +0,0 @@
-////
-Licensed to the Apache Software Foundation (ASF) under one or more
-contributor license agreements.  See the NOTICE file distributed with
-this work for additional information regarding copyright ownership.
-The ASF licenses this file to You under the Apache License, Version 2.0
-(the "License"); you may not use this file except in compliance with
-the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-////
-[[giraphgraphcomputer]]
-==== GiraphGraphComputer
-
-[source,xml]
-----
-<dependency>
-   <groupId>org.apache.tinkerpop</groupId>
-   <artifactId>giraph-gremlin</artifactId>
-   <version>x.y.z</version>
-</dependency>
-----
-
-image:giraph-logo.png[width=100,float=left] 
link:http://giraph.apache.org[Giraph] is an Apache Software Foundation
-project focused on OLAP-based graph processing. Giraph makes use of the 
distributed graph computing paradigm made
-popular by Google's Pregel. In Giraph, developers write "vertex programs" that 
get executed at each vertex in
-parallel. These programs communicate with one another in a bulk synchronous 
parallel (BSP) manner. This model aligns
-with TinkerPop3's `GraphComputer` API. TinkerPop3 provides an implementation 
of `GraphComputer` that works for Giraph
-called `GiraphGraphComputer`. Moreover, with TinkerPop3's 
<<mapreduce,MapReduce>>-framework, the standard
-Giraph/Pregel model is extended to support an arbitrary number of MapReduce 
phases to aggregate and yield results
-from the graph. Below are examples using `GiraphGraphComputer` from the 
<<gremlin-console,Gremlin-Console>>.
-
-WARNING: Giraph uses a large number of Hadoop counters. The default for Hadoop 
is 120. In `mapred-site.xml` it is
-possible to increase the limit it via the `mapreduce.job.counters.max` 
property. A good value to use is 1000. This
-is a cluster-wide property so be sure to restart the cluster after updating.
-
-WARNING: The maximum number of workers can be no larger than the number of 
map-slots in the Hadoop cluster minus 1.
-For example, if the Hadoop cluster has 4 map slots, then `giraph.maxWorkers` 
can not be larger than 3. One map-slot
-is reserved for the master compute node and all other slots can be allocated 
as workers to execute the VertexPrograms
-on the vertices of the graph.
-
-If `GiraphGraphComputer` will be used as the `GraphComputer` for `HadoopGraph` 
then its `lib` directory should be
-specified in `HADOOP_GREMLIN_LIBS`.
-
-[source,shell]
-export 
HADOOP_GREMLIN_LIBS=$HADOOP_GREMLIN_LIBS:/usr/local/gremlin-console/ext/giraph-gremlin/lib
-
-Or, the user can specify the directory in the Gremlin Console.
-
-[source,groovy]
-System.setProperty('HADOOP_GREMLIN_LIBS',System.getProperty('HADOOP_GREMLIN_LIBS')
 + ':' + '/usr/local/gremlin-console/ext/giraph-gremlin/lib')
-
-[gremlin-groovy]
-----
-graph = GraphFactory.open('conf/hadoop/hadoop-gryo.properties')
-g = graph.traversal().withComputer(GiraphGraphComputer)
-g.V().count()
-g.V().out().out().values('name')
-----
-
-IMPORTANT: The examples above do not use lambdas (i.e. closures in 
Gremlin-Groovy). This makes the traversal
-serializable and thus, able to be distributed to all machines in the Hadoop 
cluster. If a lambda is required in a
-traversal, then the traversal must be sent as a `String` and compiled locally 
at each machine in the cluster. The
-following example demonstrates the `:remote` command which allows for 
submitting Gremlin traversals as a `String`.
-
-[gremlin-groovy]
-----
-graph = GraphFactory.open('conf/hadoop/hadoop-gryo.properties')
-g = graph.traversal().withComputer(GiraphGraphComputer)
-:remote connect tinkerpop.hadoop graph g
-:> g.V().group().by{it.value('name')[1]}.by('name')
-result
-result.memory.runtime
-----
-
-NOTE: If the user explicitly specifies `giraph.maxWorkers` and/or 
`giraph.numComputeThreads` in the configuration,
-then these values will be used by Giraph. However, if these are not specified 
and the user never calls
-`GraphComputer.workers()` then `GiraphGraphComputer` will try to compute the 
number of workers/threads to use based
-on the cluster's profile.
-
-===== Loading with BulkLoaderVertexProgram
-
-The <<bulkloadervertexprogram, BulkLoaderVertexProgram>> is a generalized bulk 
loader that can be used to load
-large amounts of data to and from different `Graph` implementations. The 
following code demonstrates how to load
-the Grateful Dead graph from HadoopGraph into TinkerGraph over Giraph:
-
-[gremlin-groovy]
-----
-hdfs.copyFromLocal('data/grateful-dead.kryo', 'grateful-dead.kryo')
-readGraph = GraphFactory.open('conf/hadoop/hadoop-grateful-gryo.properties')
-writeGraph = 'conf/tinkergraph-gryo.properties'
-blvp = BulkLoaderVertexProgram.build().
-           keepOriginalIds(false).
-           writeGraph(writeGraph).create(readGraph)
-readGraph.compute(GiraphGraphComputer).workers(1).program(blvp).submit().get()
-:set max-iteration 10
-graph = GraphFactory.open(writeGraph)
-g = graph.traversal()
-g.V().valueMap()
-graph.close()
-----
-
-[source,properties]
-----
-# hadoop-grateful-gryo.properties
-
-#
-# Hadoop Graph Configuration
-#
-gremlin.graph=org.apache.tinkerpop.gremlin.hadoop.structure.HadoopGraph
-gremlin.hadoop.graphReader=org.apache.tinkerpop.gremlin.hadoop.structure.io.gryo.GryoInputFormat
-gremlin.hadoop.graphWriter=org.apache.hadoop.mapreduce.lib.output.NullOutputFormat
-gremlin.hadoop.inputLocation=grateful-dead.kryo
-gremlin.hadoop.outputLocation=output
-gremlin.hadoop.jarsInDistributedCache=true
-
-#
-# GiraphGraphComputer Configuration
-#
-giraph.minWorkers=1
-giraph.maxWorkers=1
-giraph.useOutOfCoreGraph=true
-giraph.useOutOfCoreMessages=true
-mapred.map.child.java.opts=-Xmx1024m
-mapred.reduce.child.java.opts=-Xmx1024m
-giraph.numInputThreads=4
-giraph.numComputeThreads=4
-giraph.maxMessagesInMemory=100000
-----
-
-[source,properties]
-----
-# tinkergraph-gryo.properties
-
-gremlin.graph=org.apache.tinkerpop.gremlin.tinkergraph.structure.TinkerGraph
-gremlin.tinkergraph.graphFormat=gryo
-gremlin.tinkergraph.graphLocation=/tmp/tinkergraph.kryo
-----
-
-NOTE: The path to TinkerGraph needs to be included in the 
`HADOOP_GREMLIN_LIBS` for the above example to work.

http://git-wip-us.apache.org/repos/asf/tinkerpop/blob/33bfe547/docs/src/reference/implementations-hadoop-end.asciidoc
----------------------------------------------------------------------
diff --git a/docs/src/reference/implementations-hadoop-end.asciidoc 
b/docs/src/reference/implementations-hadoop-end.asciidoc
index f0f0fa6..e11c542 100644
--- a/docs/src/reference/implementations-hadoop-end.asciidoc
+++ b/docs/src/reference/implementations-hadoop-end.asciidoc
@@ -170,7 +170,7 @@ hdfs.ls()
 ==== Interacting with Spark
 
 If a Spark context is persisted, then Spark RDDs will remain the Spark cache 
and accessible over subsequent jobs.
-RDDs are retrieved and saved to the `SparkContext` via `PersistedInputRDD` and 
`PersistedOutputRDD` respectivly.
+RDDs are retrieved and saved to the `SparkContext` via `PersistedInputRDD` and 
`PersistedOutputRDD` respectively.
 Persisted RDDs can be accessed using `spark`.
 
 [gremlin-groovy]
@@ -186,144 +186,4 @@ spark.head('output', PersistedInputRDD)
 spark.head('output', 'clusterCount', PersistedInputRDD)
 spark.rm('output')
 spark.ls()
-----
-
-=== A Command Line Example
-
-image::pagerank-logo.png[width=300]
-
-The classic link:http://en.wikipedia.org/wiki/PageRank[PageRank] centrality 
algorithm can be executed over the
-TinkerPop graph from the command line using `GiraphGraphComputer`.
-
-WARNING: Be sure that the `HADOOP_GREMLIN_LIBS` references the location `lib` 
directory of the respective
-`GraphComputer` engine being used or else the requisite dependencies will not 
be uploaded to the Hadoop cluster.
-
-[source,text]
-----
-$ hdfs dfs -copyFromLocal data/tinkerpop-modern.json tinkerpop-modern.json
-$ hdfs dfs -ls
-Found 2 items
--rw-r--r--   1 marko supergroup       2356 2014-07-28 13:00 
/user/marko/tinkerpop-modern.json
-$ hadoop jar target/giraph-gremlin-x.y.z-job.jar 
org.apache.tinkerpop.gremlin.giraph.process.computer.GiraphGraphComputer 
../hadoop-gremlin/conf/hadoop-graphson.properties
-15/09/11 08:02:08 WARN util.NativeCodeLoader: Unable to load native-hadoop 
library for your platform... using builtin-java classes where applicable
-15/09/11 08:02:11 INFO computer.GiraphGraphComputer: HadoopGremlin(Giraph): 
PageRankVertexProgram[alpha=0.85,iterations=30]
-15/09/11 08:02:12 INFO mapreduce.JobSubmitter: number of splits:3
-15/09/11 08:02:12 INFO mapreduce.JobSubmitter: Submitting tokens for job: 
job_1441915907347_0028
-15/09/11 08:02:12 INFO impl.YarnClientImpl: Submitted application 
application_1441915907347_0028
-15/09/11 08:02:12 INFO job.GiraphJob: Tracking URL: 
http://markos-macbook:8088/proxy/application_1441915907347_0028/
-15/09/11 08:02:12 INFO job.GiraphJob: Waiting for resources... Job will start 
only when it gets all 3 mappers
-15/09/11 08:03:54 INFO mapreduce.Job: Running job: job_1441915907347_0028
-15/09/11 08:03:55 INFO mapreduce.Job: Job job_1441915907347_0028 running in 
uber mode : false
-15/09/11 08:03:55 INFO mapreduce.Job:  map 33% reduce 0%
-15/09/11 08:03:57 INFO mapreduce.Job:  map 67% reduce 0%
-15/09/11 08:04:01 INFO mapreduce.Job:  map 100% reduce 0%
-15/09/11 08:06:17 INFO mapreduce.Job: Job job_1441915907347_0028 completed 
successfully
-15/09/11 08:06:17 INFO mapreduce.Job: Counters: 80
-    File System Counters
-        FILE: Number of bytes read=0
-        FILE: Number of bytes written=483918
-        FILE: Number of read operations=0
-        FILE: Number of large read operations=0
-        FILE: Number of write operations=0
-        HDFS: Number of bytes read=1465
-        HDFS: Number of bytes written=1760
-        HDFS: Number of read operations=39
-        HDFS: Number of large read operations=0
-        HDFS: Number of write operations=20
-    Job Counters
-        Launched map tasks=3
-        Other local map tasks=3
-        Total time spent by all maps in occupied slots (ms)=458105
-        Total time spent by all reduces in occupied slots (ms)=0
-        Total time spent by all map tasks (ms)=458105
-        Total vcore-seconds taken by all map tasks=458105
-        Total megabyte-seconds taken by all map tasks=469099520
-    Map-Reduce Framework
-        Map input records=3
-        Map output records=0
-        Input split bytes=132
-        Spilled Records=0
-        Failed Shuffles=0
-        Merged Map outputs=0
-        GC time elapsed (ms)=1594
-        CPU time spent (ms)=0
-        Physical memory (bytes) snapshot=0
-        Virtual memory (bytes) snapshot=0
-        Total committed heap usage (bytes)=527958016
-    Giraph Stats
-        Aggregate edges=0
-        Aggregate finished vertices=0
-        Aggregate sent message message bytes=13535
-        Aggregate sent messages=186
-        Aggregate vertices=6
-        Current master task partition=0
-        Current workers=2
-        Last checkpointed superstep=0
-        Sent message bytes=438
-        Sent messages=6
-        Superstep=31
-    Giraph Timers
-        Initialize (ms)=2996
-        Input superstep (ms)=5209
-        Setup (ms)=59
-        Shutdown (ms)=9324
-        Superstep 0 GiraphComputation (ms)=3861
-        Superstep 1 GiraphComputation (ms)=4027
-        Superstep 10 GiraphComputation (ms)=4000
-        Superstep 11 GiraphComputation (ms)=4004
-        Superstep 12 GiraphComputation (ms)=3999
-        Superstep 13 GiraphComputation (ms)=4000
-        Superstep 14 GiraphComputation (ms)=4005
-        Superstep 15 GiraphComputation (ms)=4003
-        Superstep 16 GiraphComputation (ms)=4001
-        Superstep 17 GiraphComputation (ms)=4007
-        Superstep 18 GiraphComputation (ms)=3998
-        Superstep 19 GiraphComputation (ms)=4006
-        Superstep 2 GiraphComputation (ms)=4007
-        Superstep 20 GiraphComputation (ms)=3996
-        Superstep 21 GiraphComputation (ms)=4006
-        Superstep 22 GiraphComputation (ms)=4002
-        Superstep 23 GiraphComputation (ms)=3998
-        Superstep 24 GiraphComputation (ms)=4003
-        Superstep 25 GiraphComputation (ms)=4001
-        Superstep 26 GiraphComputation (ms)=4003
-        Superstep 27 GiraphComputation (ms)=4005
-        Superstep 28 GiraphComputation (ms)=4002
-        Superstep 29 GiraphComputation (ms)=4001
-        Superstep 3 GiraphComputation (ms)=3988
-        Superstep 30 GiraphComputation (ms)=4248
-        Superstep 4 GiraphComputation (ms)=4010
-        Superstep 5 GiraphComputation (ms)=3998
-        Superstep 6 GiraphComputation (ms)=3996
-        Superstep 7 GiraphComputation (ms)=4005
-        Superstep 8 GiraphComputation (ms)=4009
-        Superstep 9 GiraphComputation (ms)=3994
-        Total (ms)=138788
-    File Input Format Counters
-        Bytes Read=0
-    File Output Format Counters
-        Bytes Written=0
-$ hdfs dfs -cat output/~g/*
-{"id":1,"label":"person","properties":{"gremlin.pageRankVertexProgram.pageRank":[{"id":39,"value":0.15000000000000002}],"name":[{"id":0,"value":"marko"}],"gremlin.pageRankVertexProgram.edgeCount":[{"id":10,"value":3.0}],"age":[{"id":1,"value":29}]}}
-{"id":5,"label":"software","properties":{"gremlin.pageRankVertexProgram.pageRank":[{"id":35,"value":0.23181250000000003}],"name":[{"id":8,"value":"ripple"}],"gremlin.pageRankVertexProgram.edgeCount":[{"id":6,"value":0.0}],"lang":[{"id":9,"value":"java"}]}}
-{"id":3,"label":"software","properties":{"gremlin.pageRankVertexProgram.pageRank":[{"id":39,"value":0.4018125}],"name":[{"id":4,"value":"lop"}],"gremlin.pageRankVertexProgram.edgeCount":[{"id":10,"value":0.0}],"lang":[{"id":5,"value":"java"}]}}
-{"id":4,"label":"person","properties":{"gremlin.pageRankVertexProgram.pageRank":[{"id":39,"value":0.19250000000000003}],"name":[{"id":6,"value":"josh"}],"gremlin.pageRankVertexProgram.edgeCount":[{"id":10,"value":2.0}],"age":[{"id":7,"value":32}]}}
-{"id":2,"label":"person","properties":{"gremlin.pageRankVertexProgram.pageRank":[{"id":35,"value":0.19250000000000003}],"name":[{"id":2,"value":"vadas"}],"gremlin.pageRankVertexProgram.edgeCount":[{"id":6,"value":0.0}],"age":[{"id":3,"value":27}]}}
-{"id":6,"label":"person","properties":{"gremlin.pageRankVertexProgram.pageRank":[{"id":35,"value":0.15000000000000002}],"name":[{"id":10,"value":"peter"}],"gremlin.pageRankVertexProgram.edgeCount":[{"id":6,"value":1.0}],"age":[{"id":11,"value":35}]}}
-----
-
-Vertex 4 ("josh") is isolated below:
-
-[source,js]
-----
-{
-  "id":4,
-  "label":"person",
-  "properties": {
-    
"gremlin.pageRankVertexProgram.pageRank":[{"id":39,"value":0.19250000000000003}],
-    "name":[{"id":6,"value":"josh"}],
-    "gremlin.pageRankVertexProgram.edgeCount":[{"id":10,"value":2.0}],
-    "age":[{"id":7,"value":32}]}
-  }
-}
-----
+----
\ No newline at end of file
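
A short sketch of the persisted-RDD workflow kept in the section above, assuming a persisted Spark context (`gremlin.spark.persistContext=true` in the properties file) and using `output` as a placeholder RDD name.

[source,groovy]
----
// write the job output to the Spark cache instead of HDFS, then inspect it via `spark`
graph = GraphFactory.open('conf/hadoop/hadoop-gryo.properties')
graph.configuration().setProperty('gremlin.hadoop.graphWriter',
    'org.apache.tinkerpop.gremlin.spark.structure.io.PersistedOutputRDD')
graph.configuration().setProperty('gremlin.hadoop.outputLocation', 'output')   // placeholder name
graph.traversal().withComputer(SparkGraphComputer).V().count()
spark.ls()
spark.head('output', PersistedInputRDD)
spark.rm('output')
----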

http://git-wip-us.apache.org/repos/asf/tinkerpop/blob/33bfe547/docs/src/reference/implementations-hadoop-start.asciidoc
----------------------------------------------------------------------
diff --git a/docs/src/reference/implementations-hadoop-start.asciidoc 
b/docs/src/reference/implementations-hadoop-start.asciidoc
index 31ecf6b..8825009 100644
--- a/docs/src/reference/implementations-hadoop-start.asciidoc
+++ b/docs/src/reference/implementations-hadoop-start.asciidoc
@@ -34,9 +34,8 @@ using both TinkerPop3's OLTP and OLAP graph computing models.
 IMPORTANT: This section assumes that the user has a Hadoop 2.x cluster 
functioning. For more information on getting
 started with Hadoop, please see the
 
link:http://hadoop.apache.org/docs/r2.7.2/hadoop-project-dist/hadoop-common/SingleCluster.html[Single
 Node Setup]
-tutorial. Moreover, if using `GiraphGraphComputer` or `SparkGraphComputer` it 
is advisable that the reader also
-familiarize their self with Giraph 
(link:http://giraph.apache.org/quick_start.html[Getting Started]) and Spark
-(link:http://spark.apache.org/docs/latest/quick-start.html[Quick Start]).
+tutorial. Moreover, if using `SparkGraphComputer` it is advisable that the 
reader also
+familiarize themselves with Spark 
(link:http://spark.apache.org/docs/latest/quick-start.html[Quick Start]).
 
 === Installing Hadoop-Gremlin
 
@@ -87,11 +86,7 @@ distributed cache 
(link:http://hadoop.apache.org/docs/current/hadoop-yarn/hadoop
 Note that the locations in `HADOOP_GREMLIN_LIBS` can be colon-separated (`:`) 
and all jars from all locations will
 be loaded into the cluster. Locations can be local paths (e.g. 
`/path/to/libs`), but may also be prefixed with a file
 scheme to reference files or directories in different file systems (e.g. 
`hdfs:///path/to/distributed/libs`).
-Typically, only the jars of the respective GraphComputer are required to be 
loaded (e.g. `GiraphGraphComputer` plugin lib
-directory).
-
-[source,shell]
-export HADOOP_GREMLIN_LIBS=/usr/local/gremlin-console/ext/giraph-gremlin/lib
+Typically, only the jars of the respective `GraphComputer` are required to be 
loaded.
 
 === Properties Files
 
@@ -113,21 +108,9 @@ spark.master=local[4]
 spark.executor.memory=1g
 
spark.serializer=org.apache.tinkerpop.gremlin.spark.structure.io.gryo.GryoSerializer
 gremlin.spark.persistContext=true
-#####################################
-# GiraphGraphComputer Configuration #
-#####################################
-giraph.minWorkers=2
-giraph.maxWorkers=2
-giraph.useOutOfCoreGraph=true
-giraph.useOutOfCoreMessages=true
-mapreduce.map.java.opts=-Xmx1024m
-mapreduce.reduce.java.opts=-Xmx1024m
-giraph.numInputThreads=2
-giraph.numComputeThreads=2
 
 A review of the Hadoop-Gremlin specific properties are provided in the table 
below. For the respective OLAP
-engines (<<sparkgraphcomputer,`SparkGraphComputer`>> or 
<<giraphgraphcomputer,`GiraphGraphComputer`>>) refer
-to their respective documentation for configuration options.
+engine (<<sparkgraphcomputer,`SparkGraphComputer`>>) refer to its 
documentation for configuration options.
 
 [width="100%",cols="2,10",options="header"]
 |=========================================================
@@ -145,7 +128,7 @@ Along with the properties above, the numerous 
link:http://hadoop.apache.org/docs
 can be added as needed to tune and parameterize the executed Hadoop-Gremlin 
job on the respective Hadoop cluster.
 
 IMPORTANT: As the size of the graphs being processed becomes large, it is 
important to fully understand how the
-underlying OLAP engine (e.g. Spark, Giraph, etc.) works and understand the 
numerous parameterizations offered by
+underlying OLAP engine (e.g. Spark) works and understand the numerous 
parameterizations offered by
 these systems. Such knowledge can help alleviate out of memory exceptions, 
slow load times, slow processing times,
 garbage collection issues, etc.
 
@@ -183,17 +166,11 @@ supports the following two implementations.
 * <<sparkgraphcomputer,`SparkGraphComputer`>>: Leverages Apache Spark to 
execute TinkerPop3 OLAP computations.
 ** The graph may fit within the total RAM of the cluster (supports larger 
graphs). Message passing is coordinated via
 Spark map/reduce/join operations on in-memory and disk-cached data (average 
speed traversals).
-* <<giraphgraphcomputer,`GiraphGraphComputer`>>: Leverages Apache Giraph to 
execute TinkerPop3 OLAP computations.
-** The graph should fit within the total RAM of the Hadoop cluster (graph size 
restriction), though "out-of-core"
-processing is possible. Message passing is coordinated via ZooKeeper for the 
in-memory graph (speedy traversals).
 
 TIP: image:gremlin-sugar.png[width=50,float=left] For those wanting to use the 
<<sugar-plugin,SugarPlugin>> with
 their submitted traversal, do `:remote config useSugar true` as well as 
`:plugin use tinkerpop.sugar` at the start of
 the Gremlin Console session if it is not already activated.
 
-Note that `SparkGraphComputer` and `GiraphGraphComputer` are loaded via their 
respective plugins. Typically only
-one plugin or the other is loaded depending on the desired `GraphComputer` to 
use.
-
 [source,text]
 ----
 $ bin/gremlin.sh
@@ -205,8 +182,6 @@ plugin activated: tinkerpop.server
 plugin activated: tinkerpop.utilities
 plugin activated: tinkerpop.tinkergraph
 plugin activated: tinkerpop.hadoop
-gremlin> :install org.apache.tinkerpop giraph-gremlin x.y.z
-==>loaded: [org.apache.tinkerpop, giraph-gremlin, x.y.z] - restart the console 
to use [tinkerpop.giraph]
 gremlin> :install org.apache.tinkerpop spark-gremlin x.y.z
 ==>loaded: [org.apache.tinkerpop, spark-gremlin, x.y.z] - restart the console 
to use [tinkerpop.spark]
 gremlin> :q
@@ -219,13 +194,10 @@ plugin activated: tinkerpop.server
 plugin activated: tinkerpop.utilities
 plugin activated: tinkerpop.tinkergraph
 plugin activated: tinkerpop.hadoop
-gremlin> :plugin use tinkerpop.giraph
-==>tinkerpop.giraph activated
 gremlin> :plugin use tinkerpop.spark
 ==>tinkerpop.spark activated
 ----
 
-WARNING: Hadoop, Spark, and Giraph all depend on many of the same libraries 
(e.g. ZooKeeper, Snappy, Netty, Guava,
+WARNING: Hadoop and Spark both depend on many of the same libraries (e.g. 
ZooKeeper, Snappy, Netty, Guava,
 etc.). Unfortunately, typically these dependencies are not to the same 
versions of the respective libraries. As such,
-it is best to *not* have both Spark and Giraph plugins loaded in the same 
console session nor in the same Java
-project (though intelligent `<exclusion>`-usage can help alleviate conflicts 
in a Java project).
+it may be necessary to manually clean up dependency conflicts among 
different plugins.
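
Since the Giraph-specific `HADOOP_GREMLIN_LIBS` example was removed above, here is a hedged sketch of the equivalent setup for the Spark plugin from within the Gremlin Console; the install path is a placeholder and should point at the local console's `ext/spark-gremlin/lib` directory.

[source,groovy]
----
// point HADOOP_GREMLIN_LIBS at the Spark plugin's lib directory so its jars
// are shipped to the cluster; adjust the path to the local console install
System.setProperty('HADOOP_GREMLIN_LIBS',
    System.getProperty('HADOOP_GREMLIN_LIBS', '') + ':' +
    '/usr/local/gremlin-console/ext/spark-gremlin/lib')
graph = GraphFactory.open('conf/hadoop/hadoop-gryo.properties')
g = graph.traversal().withComputer(SparkGraphComputer)
g.V().count()
----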

http://git-wip-us.apache.org/repos/asf/tinkerpop/blob/33bfe547/docs/src/reference/index.asciidoc
----------------------------------------------------------------------
diff --git a/docs/src/reference/index.asciidoc 
b/docs/src/reference/index.asciidoc
index decc7ef..6e81bf2 100644
--- a/docs/src/reference/index.asciidoc
+++ b/docs/src/reference/index.asciidoc
@@ -41,7 +41,6 @@ include::implementations-neo4j.asciidoc[]
 // console is to have a new asciidoc page.
 include::implementations-hadoop-start.asciidoc[]
 include::implementations-spark.asciidoc[]
-include::implementations-giraph.asciidoc[]
 include::implementations-hadoop-end.asciidoc[]
 
 include::gremlin-variants.asciidoc[]

http://git-wip-us.apache.org/repos/asf/tinkerpop/blob/33bfe547/docs/src/tutorials/getting-started/index.asciidoc
----------------------------------------------------------------------
diff --git a/docs/src/tutorials/getting-started/index.asciidoc 
b/docs/src/tutorials/getting-started/index.asciidoc
index af3aebc..622c596 100644
--- a/docs/src/tutorials/getting-started/index.asciidoc
+++ b/docs/src/tutorials/getting-started/index.asciidoc
@@ -465,13 +465,13 @@ implementations are behind TinkerPop APIs, which open the 
possibility to switch
 
 TinkerPop has always had the vision of being an abstraction over different 
graph databases. That much
 is not new and dates back to TinkerPop 1.x. It is in TinkerPop 3.x however 
that we see the introduction of the notion
-that TinkerPop is also an abstraction over different graph processors like 
link:http://spark.apache.org[Spark] and
-link:http://giraph.apache.org/[Giraph]. The scope of this tutorial does not 
permit it to delve into
-"graph processors", but the short story is that the same Gremlin statement we 
wrote in the examples above can be
-executed to run in distributed fashion over Spark or Hadoop. The changes 
required to the code to do this are not
-in the traversal itself, but in the definition of the `TraversalSource`. You 
can again see why we encourage, graph
-operations to be executed through that class as opposed to just using `Graph`. 
You can read more about these
-features in this section on 
link:http://tinkerpop.apache.org/docs/x.y.z/reference/#hadoop-gremlin[hadoop-gremlin].
+that TinkerPop is also an abstraction over different graph processors like 
link:http://spark.apache.org[Spark]. The
+scope of this tutorial does not permit it to delve into "graph processors", 
but the short story is that the same
+Gremlin statement we wrote in the examples above can be executed to run in 
distributed fashion over Spark or Hadoop.
+The changes required to the code to do this are not in the traversal itself, 
but in the definition of the
+`TraversalSource`. You can again see why we encourage graph operations to be 
executed through that class as opposed
+to just using `Graph`. You can read more about these features in this section 
on
+link:http://tinkerpop.apache.org/docs/x.y.z/reference/#hadoop-gremlin[hadoop-gremlin].
 
 TIP: To maintain an abstraction over `Graph` creation use 
`GraphFactory.open()` to construct new instances. See
 the documentation for individual `Graph` implementations to learn about the 
configuration options to provide.
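
To make the tutorial's point concrete, a small sketch showing that only the `TraversalSource` definition changes between OLTP and OLAP execution; it assumes a Hadoop-backed graph configuration is already open.

[source,groovy]
----
// the traversal text is identical; only the TraversalSource differs
g = graph.traversal()                                   // local OLTP
g.V().hasLabel('person').values('name')

g = graph.traversal().withComputer(SparkGraphComputer)  // distributed OLAP over Spark
g.V().hasLabel('person').values('name')
----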

http://git-wip-us.apache.org/repos/asf/tinkerpop/blob/33bfe547/docs/src/upgrade/release-3.4.x.asciidoc
----------------------------------------------------------------------
diff --git a/docs/src/upgrade/release-3.4.x.asciidoc 
b/docs/src/upgrade/release-3.4.x.asciidoc
index 1916da8..713916a 100644
--- a/docs/src/upgrade/release-3.4.x.asciidoc
+++ b/docs/src/upgrade/release-3.4.x.asciidoc
@@ -29,6 +29,14 @@ Please see the 
link:https://github.com/apache/tinkerpop/blob/3.4.0/CHANGELOG.asc
 
 === Upgrading for Users
 
+==== Removal of Giraph Support
+
+Support for Giraph has been removed as of this version. There were a number of 
reasons for this decision which were
+discussed in the community prior to taking this step. Users should switch to 
Spark for their OLAP-based graph computing
+needs.
+
+See: link:https://issues.apache.org/jira/browse/TINKERPOP-1930[TINKERPOP-1930]
+
 ==== Modifications to reducing barrier steps
 
 The behavior of `min()`, `max()`, `mean()` and `sum()` has been modified to 
return no result if there's no input. Previously these steps yielded the 
internal seed value:

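Returning to the "Removal of Giraph Support" note above: for most users the migration amounts to swapping the `GraphComputer` passed to `withComputer()`. A minimal sketch using the stock Hadoop example configuration:

[source,groovy]
----
// before (3.3.x, no longer available):
//   g = graph.traversal().withComputer(GiraphGraphComputer)
// after (3.4.x):
graph = GraphFactory.open('conf/hadoop/hadoop-gryo.properties')
g = graph.traversal().withComputer(SparkGraphComputer)
g.V().count()
----
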
http://git-wip-us.apache.org/repos/asf/tinkerpop/blob/33bfe547/giraph-gremlin/pom.xml
----------------------------------------------------------------------
diff --git a/giraph-gremlin/pom.xml b/giraph-gremlin/pom.xml
deleted file mode 100644
index 8cf3f5b..0000000
--- a/giraph-gremlin/pom.xml
+++ /dev/null
@@ -1,258 +0,0 @@
-<!--
-Licensed to the Apache Software Foundation (ASF) under one or more
-contributor license agreements.  See the NOTICE file distributed with
-this work for additional information regarding copyright ownership.
-The ASF licenses this file to You under the Apache License, Version 2.0
-(the "License"); you may not use this file except in compliance with
-the License.  You may obtain a copy of the License at
-
-  http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
--->
-<project xmlns="http://maven.apache.org/POM/4.0.0";
-         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance";
-         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/maven-v4_0_0.xsd";>
-    <modelVersion>4.0.0</modelVersion>
-    <parent>
-        <groupId>org.apache.tinkerpop</groupId>
-        <artifactId>tinkerpop</artifactId>
-        <version>3.4.0-SNAPSHOT</version>
-    </parent>
-    <artifactId>giraph-gremlin</artifactId>
-    <name>Apache TinkerPop :: Giraph Gremlin</name>
-    <dependencyManagement>
-        <dependencies>
-            <dependency>
-                <!-- see: https://github.com/apache/hadoop/pull/84 -->
-                <groupId>io.netty</groupId>
-                <artifactId>netty</artifactId>
-                <version>3.7.1.Final</version>
-            </dependency>
-        </dependencies>
-    </dependencyManagement>
-    <dependencies>
-        <dependency>
-            <groupId>org.apache.tinkerpop</groupId>
-            <artifactId>gremlin-core</artifactId>
-            <version>${project.version}</version>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.tinkerpop</groupId>
-            <artifactId>hadoop-gremlin</artifactId>
-            <version>${project.version}</version>
-            <exclusions>
-                <exclusion>
-                    <groupId>io.netty</groupId>
-                    <artifactId>netty-all</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>com.thoughtworks.paranamer</groupId>
-                    <artifactId>paranamer</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.apache.zookeeper</groupId>
-                    <artifactId>zookeeper</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-        <!-- GIRAPH -->
-        <dependency>
-            <groupId>org.apache.giraph</groupId>
-            <artifactId>giraph-core</artifactId>
-            <version>1.1.0-hadoop2</version>
-            <exclusions>
-                <!-- self conflicts -->
-                <exclusion>
-                    <groupId>log4j</groupId>
-                    <artifactId>log4j</artifactId>
-                </exclusion>
-                <!-- conflicts with hadoop-gremlin -->
-                <exclusion>
-                    <groupId>com.google.guava</groupId>
-                    <artifactId>guava</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>commons-codec</groupId>
-                    <artifactId>commons-codec</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>commons-io</groupId>
-                    <artifactId>commons-io</artifactId>
-                </exclusion>
-                <!-- uses an older version that conflicts with spark 2.0 -->
-                <exclusion>
-                    <groupId>com.fasterxml.jackson.core</groupId>
-                    <artifactId>jackson-annotations</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.codehaus.jackson</groupId>
-                    <artifactId>jackson-core-asl</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.codehaus.jackson</groupId>
-                    <artifactId>jackson-mapper-asl</artifactId>
-                </exclusion>
-                <!-- conflicts with gremlin-groovy -->
-                <exclusion>
-                    <groupId>org.apache.commons</groupId>
-                    <artifactId>commons-lang3</artifactId>
-                </exclusion>
-                <!-- conflicts with gremlin-core -->
-                <exclusion>
-                    <groupId>org.slf4j</groupId>
-                    <artifactId>slf4j-api</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.slf4j</groupId>
-                    <artifactId>slf4j-log4j12</artifactId>
-                </exclusion>
-                <!-- gremlin-groovy conflicts -->
-                <exclusion>
-                    <groupId>jline</groupId>
-                    <artifactId>jline</artifactId>
-                </exclusion>
-                <!-- conflicts with spark (a hack so both can be loaded in 
parallel) -->
-                <exclusion>
-                    <groupId>javax.servlet</groupId>
-                    <artifactId>servlet-api</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-        <!-- consistent dependencies -->
-        <dependency>
-            <groupId>javax.servlet</groupId>
-            <artifactId>javax.servlet-api</artifactId>
-            <version>3.1.0</version>
-        </dependency>
-        <dependency>
-            <groupId>com.fasterxml.jackson.core</groupId>
-            <artifactId>jackson-annotations</artifactId>
-            <version>2.6.5</version>
-        </dependency>
-        <!-- TEST -->
-        <dependency>
-            <groupId>org.apache.tinkerpop</groupId>
-            <artifactId>gremlin-test</artifactId>
-            <version>${project.version}</version>
-            <exclusions>
-                <exclusion>
-                    <groupId>commons-io</groupId>
-                    <artifactId>commons-io</artifactId>
-                </exclusion>
-            </exclusions>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.tinkerpop</groupId>
-            <artifactId>hadoop-gremlin</artifactId>
-            <version>${project.version}</version>
-            <type>test-jar</type>
-            <scope>test</scope>
-            <exclusions>
-                <exclusion>
-                    <groupId>io.netty</groupId>
-                    <artifactId>netty-all</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>com.thoughtworks.paranamer</groupId>
-                    <artifactId>paranamer</artifactId>
-                </exclusion>
-                <exclusion>
-                    <groupId>org.apache.zookeeper</groupId>
-                    <artifactId>zookeeper</artifactId>
-                </exclusion>
-            </exclusions>
-        </dependency>
-        <dependency>
-            <groupId>org.apache.tinkerpop</groupId>
-            <artifactId>tinkergraph-gremlin</artifactId>
-            <version>${project.version}</version>
-            <scope>test</scope>
-        </dependency>
-        <dependency>
-            <groupId>org.slf4j</groupId>
-            <artifactId>slf4j-log4j12</artifactId>
-            <scope>test</scope>
-        </dependency>
-    </dependencies>
-    <build>
-        <directory>${basedir}/target</directory>
-        <finalName>${project.artifactId}-${project.version}</finalName>
-        <resources>
-            <resource>
-                <directory>${basedir}/src/main/resources</directory>
-            </resource>
-        </resources>
-        <testResources>
-            <testResource>
-                <directory>${basedir}/src/test/resources</directory>
-            </testResource>
-        </testResources>
-        <plugins>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-assembly-plugin</artifactId>
-                <executions>
-                    <execution>
-                        <id>build-detached-assemblies</id>
-                        <phase>package</phase>
-                        <goals>
-                            <goal>single</goal>
-                        </goals>
-                        <configuration>
-                            <attach>false</attach>
-                            <descriptors>
-                                
<descriptor>src/assembly/standalone.xml</descriptor>
-                                
<descriptor>src/assembly/hadoop-job.xml</descriptor>
-                            </descriptors>
-                            
<finalName>${project.artifactId}-${project.version}</finalName>
-                        </configuration>
-                    </execution>
-                </executions>
-            </plugin>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-surefire-plugin</artifactId>
-            </plugin>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-failsafe-plugin</artifactId>
-            </plugin>
-            <plugin>
-                <groupId>org.codehaus.gmavenplus</groupId>
-                <artifactId>gmavenplus-plugin</artifactId>
-                <executions>
-                    <execution>
-                        <goals>
-                            <goal>addSources</goal>
-                            <goal>addTestSources</goal>
-                            <goal>generateStubs</goal>
-                            <goal>compile</goal>
-                            <goal>generateTestStubs</goal>
-                            <goal>compileTests</goal>
-                            <goal>removeStubs</goal>
-                            <goal>removeTestStubs</goal>
-                        </goals>
-                    </execution>
-                </executions>
-            </plugin>
-            <plugin>
-                <groupId>org.apache.maven.plugins</groupId>
-                <artifactId>maven-jar-plugin</artifactId>
-                <configuration>
-                    <archive>
-                        <manifestEntries>
-                            
<Gremlin-Plugin-Dependencies>org.apache.hadoop:hadoop-client:${hadoop.version}
-                            </Gremlin-Plugin-Dependencies>
-                        </manifestEntries>
-                    </archive>
-                </configuration>
-            </plugin>
-        </plugins>
-    </build>
-</project>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/tinkerpop/blob/33bfe547/giraph-gremlin/src/assembly/hadoop-job.xml
----------------------------------------------------------------------
diff --git a/giraph-gremlin/src/assembly/hadoop-job.xml 
b/giraph-gremlin/src/assembly/hadoop-job.xml
deleted file mode 100644
index 65233e9..0000000
--- a/giraph-gremlin/src/assembly/hadoop-job.xml
+++ /dev/null
@@ -1,41 +0,0 @@
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one
-  ~ or more contributor license agreements.  See the NOTICE file
-  ~ distributed with this work for additional information
-  ~ regarding copyright ownership.  The ASF licenses this file
-  ~ to you under the Apache License, Version 2.0 (the
-  ~ "License"); you may not use this file except in compliance
-  ~ with the License.  You may obtain a copy of the License at
-  ~
-  ~ http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing,
-  ~ software distributed under the License is distributed on an
-  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  ~ KIND, either express or implied.  See the License for the
-  ~ specific language governing permissions and limitations
-  ~ under the License.
-  -->
-<assembly>
-    <id>job</id>
-    <formats>
-        <format>jar</format>
-    </formats>
-    <includeBaseDirectory>false</includeBaseDirectory>
-    <dependencySets>
-        <dependencySet>
-            <unpack>false</unpack>
-            <scope>runtime</scope>
-            <outputDirectory>lib</outputDirectory>
-            <excludes>
-                <exclude>${groupId}:${artifactId}</exclude>
-            </excludes>
-        </dependencySet>
-        <dependencySet>
-            <unpack>true</unpack>
-            <includes>
-                <include>${groupId}:${artifactId}</include>
-            </includes>
-        </dependencySet>
-    </dependencySets>
-</assembly>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/tinkerpop/blob/33bfe547/giraph-gremlin/src/assembly/standalone.xml
----------------------------------------------------------------------
diff --git a/giraph-gremlin/src/assembly/standalone.xml b/giraph-gremlin/src/assembly/standalone.xml
deleted file mode 100644
index 82b8514..0000000
--- a/giraph-gremlin/src/assembly/standalone.xml
+++ /dev/null
@@ -1,50 +0,0 @@
-<!--
-  ~ Licensed to the Apache Software Foundation (ASF) under one
-  ~ or more contributor license agreements.  See the NOTICE file
-  ~ distributed with this work for additional information
-  ~ regarding copyright ownership.  The ASF licenses this file
-  ~ to you under the Apache License, Version 2.0 (the
-  ~ "License"); you may not use this file except in compliance
-  ~ with the License.  You may obtain a copy of the License at
-  ~
-  ~ http://www.apache.org/licenses/LICENSE-2.0
-  ~
-  ~ Unless required by applicable law or agreed to in writing,
-  ~ software distributed under the License is distributed on an
-  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-  ~ KIND, either express or implied.  See the License for the
-  ~ specific language governing permissions and limitations
-  ~ under the License.
-  -->
-<assembly>
-    <id>standalone</id>
-    <formats>
-        <format>dir</format>
-    </formats>
-    <includeBaseDirectory>false</includeBaseDirectory>
-
-    <fileSets>
-        <fileSet>
-            <directory>src/main/bin</directory>
-            <outputDirectory>/bin</outputDirectory>
-            <fileMode>0755</fileMode>
-        </fileSet>
-        <fileSet>
-            <directory>target/*.jar</directory>
-            <outputDirectory>/lib</outputDirectory>
-        </fileSet>
-    </fileSets>
-
-    <dependencySets>
-        <dependencySet>
-            <outputDirectory>/lib</outputDirectory>
-            <unpack>false</unpack>
-            <scope>compile</scope>
-        </dependencySet>
-        <dependencySet>
-            <outputDirectory>/lib</outputDirectory>
-            <unpack>false</unpack>
-            <scope>provided</scope>
-        </dependencySet>
-    </dependencySets>
-</assembly>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/tinkerpop/blob/33bfe547/giraph-gremlin/src/main/java/org/apache/tinkerpop/gremlin/giraph/jsr223/GiraphGremlinPlugin.java
----------------------------------------------------------------------
diff --git a/giraph-gremlin/src/main/java/org/apache/tinkerpop/gremlin/giraph/jsr223/GiraphGremlinPlugin.java b/giraph-gremlin/src/main/java/org/apache/tinkerpop/gremlin/giraph/jsr223/GiraphGremlinPlugin.java
deleted file mode 100644
index c512335..0000000
--- a/giraph-gremlin/src/main/java/org/apache/tinkerpop/gremlin/giraph/jsr223/GiraphGremlinPlugin.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.tinkerpop.gremlin.giraph.jsr223;
-
-import org.apache.tinkerpop.gremlin.giraph.process.computer.EmptyOutEdges;
-import org.apache.tinkerpop.gremlin.giraph.process.computer.GiraphComputation;
-import org.apache.tinkerpop.gremlin.giraph.process.computer.GiraphGraphComputer;
-import org.apache.tinkerpop.gremlin.giraph.process.computer.GiraphMemory;
-import org.apache.tinkerpop.gremlin.giraph.process.computer.GiraphMessageCombiner;
-import org.apache.tinkerpop.gremlin.giraph.process.computer.GiraphMessenger;
-import org.apache.tinkerpop.gremlin.giraph.process.computer.GiraphVertex;
-import org.apache.tinkerpop.gremlin.giraph.process.computer.GiraphWorkerContext;
-import org.apache.tinkerpop.gremlin.giraph.process.computer.MemoryAggregator;
-import org.apache.tinkerpop.gremlin.giraph.process.computer.PassThroughMemory;
-import org.apache.tinkerpop.gremlin.jsr223.AbstractGremlinPlugin;
-import org.apache.tinkerpop.gremlin.jsr223.DefaultImportCustomizer;
-
-/**
- * @author Stephen Mallette (http://stephen.genoprime.com)
- */
-public final class GiraphGremlinPlugin extends AbstractGremlinPlugin {
-    private static final String MODULE_NAME = "tinkerpop.giraph";
-    private static final GiraphGremlinPlugin instance = new GiraphGremlinPlugin();
-
-    public GiraphGremlinPlugin() {
-        super(MODULE_NAME, DefaultImportCustomizer.build().addClassImports(
-                EmptyOutEdges.class,
-                GiraphComputation.class,
-                GiraphGraphComputer.class,
-                GiraphMemory.class,
-                GiraphMessageCombiner.class,
-                GiraphMessenger.class,
-                GiraphVertex.class,
-                GiraphWorkerContext.class,
-                MemoryAggregator.class,
-                PassThroughMemory.class).create());
-    }
-
-    public static GiraphGremlinPlugin instance() {
-        return instance;
-    }
-}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/tinkerpop/blob/33bfe547/giraph-gremlin/src/main/java/org/apache/tinkerpop/gremlin/giraph/process/computer/EmptyOutEdges.java
----------------------------------------------------------------------
diff --git a/giraph-gremlin/src/main/java/org/apache/tinkerpop/gremlin/giraph/process/computer/EmptyOutEdges.java b/giraph-gremlin/src/main/java/org/apache/tinkerpop/gremlin/giraph/process/computer/EmptyOutEdges.java
deleted file mode 100644
index 4df4835..0000000
--- a/giraph-gremlin/src/main/java/org/apache/tinkerpop/gremlin/giraph/process/computer/EmptyOutEdges.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.tinkerpop.gremlin.giraph.process.computer;
-
-import org.apache.giraph.edge.Edge;
-import org.apache.giraph.edge.OutEdges;
-import org.apache.hadoop.io.NullWritable;
-import org.apache.tinkerpop.gremlin.hadoop.structure.io.ObjectWritable;
-
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.util.Collections;
-import java.util.Iterator;
-
-/**
- * @author Marko A. Rodriguez (http://markorodriguez.com)
- */
-public final class EmptyOutEdges implements OutEdges<ObjectWritable, NullWritable> {
-
-    private static final EmptyOutEdges INSTANCE = new EmptyOutEdges();
-
-    public static EmptyOutEdges instance() {
-        return INSTANCE;
-    }
-
-    @Override
-    public void initialize(final Iterable<Edge<ObjectWritable, NullWritable>> edges) {
-    }
-
-    @Override
-    public void initialize(final int capacity) {
-    }
-
-    @Override
-    public void initialize() {
-    }
-
-    @Override
-    public void add(final Edge<ObjectWritable, NullWritable> edge) {
-    }
-
-    @Override
-    public void remove(final ObjectWritable targetVertexId) {
-    }
-
-    @Override
-    public int size() {
-        return 0;
-    }
-
-    @Override
-    public Iterator<Edge<ObjectWritable, NullWritable>> iterator() {
-        return Collections.emptyIterator();
-    }
-
-    @Override
-    public void write(final DataOutput dataOutput) throws IOException {
-    }
-
-    @Override
-    public void readFields(final DataInput dataInput) throws IOException {
-    }
-}

http://git-wip-us.apache.org/repos/asf/tinkerpop/blob/33bfe547/giraph-gremlin/src/main/java/org/apache/tinkerpop/gremlin/giraph/process/computer/GiraphComputation.java
----------------------------------------------------------------------
diff --git a/giraph-gremlin/src/main/java/org/apache/tinkerpop/gremlin/giraph/process/computer/GiraphComputation.java b/giraph-gremlin/src/main/java/org/apache/tinkerpop/gremlin/giraph/process/computer/GiraphComputation.java
deleted file mode 100644
index 1d52566..0000000
--- a/giraph-gremlin/src/main/java/org/apache/tinkerpop/gremlin/giraph/process/computer/GiraphComputation.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.tinkerpop.gremlin.giraph.process.computer;
-
-import org.apache.giraph.graph.BasicComputation;
-import org.apache.giraph.graph.Vertex;
-import org.apache.hadoop.io.NullWritable;
-import org.apache.tinkerpop.gremlin.hadoop.structure.io.ObjectWritable;
-import org.apache.tinkerpop.gremlin.hadoop.structure.io.VertexWritable;
-import org.apache.tinkerpop.gremlin.process.computer.VertexProgram;
-import org.apache.tinkerpop.gremlin.process.computer.util.ComputerGraph;
-
-import java.io.IOException;
-
-/**
- * @author Marko A. Rodriguez (http://markorodriguez.com)
- */
-public final class GiraphComputation extends BasicComputation<ObjectWritable, VertexWritable, NullWritable, ObjectWritable> {
-
-    public GiraphComputation() {
-    }
-
-    @Override
-    public void compute(final Vertex<ObjectWritable, VertexWritable, NullWritable> vertex, final Iterable<ObjectWritable> messages) throws IOException {
-        final GiraphWorkerContext workerContext = this.getWorkerContext();
-        final VertexProgram<?> vertexProgram = workerContext.getVertexProgramPool().take();
-        vertexProgram.execute(ComputerGraph.vertexProgram(vertex.getValue().get(), vertexProgram), workerContext.getMessenger((GiraphVertex) vertex, this, messages.iterator()), workerContext.getMemory());
-        workerContext.getVertexProgramPool().offer(vertexProgram);
-    }
-}

http://git-wip-us.apache.org/repos/asf/tinkerpop/blob/33bfe547/giraph-gremlin/src/main/java/org/apache/tinkerpop/gremlin/giraph/process/computer/GiraphGraphComputer.java
----------------------------------------------------------------------
diff --git a/giraph-gremlin/src/main/java/org/apache/tinkerpop/gremlin/giraph/process/computer/GiraphGraphComputer.java b/giraph-gremlin/src/main/java/org/apache/tinkerpop/gremlin/giraph/process/computer/GiraphGraphComputer.java
deleted file mode 100644
index 1be548a..0000000
--- a/giraph-gremlin/src/main/java/org/apache/tinkerpop/gremlin/giraph/process/computer/GiraphGraphComputer.java
+++ /dev/null
@@ -1,303 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.tinkerpop.gremlin.giraph.process.computer;
-
-import org.apache.commons.configuration.BaseConfiguration;
-import org.apache.commons.configuration.Configuration;
-import org.apache.commons.configuration.FileConfiguration;
-import org.apache.commons.configuration.PropertiesConfiguration;
-import org.apache.giraph.conf.GiraphConfiguration;
-import org.apache.giraph.conf.GiraphConstants;
-import org.apache.giraph.job.GiraphJob;
-import org.apache.hadoop.filecache.DistributedCache;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapreduce.Cluster;
-import org.apache.hadoop.mapreduce.InputFormat;
-import org.apache.hadoop.mapreduce.OutputFormat;
-import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
-import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-import org.apache.hadoop.util.Tool;
-import org.apache.hadoop.util.ToolRunner;
-import org.apache.tinkerpop.gremlin.giraph.structure.io.GiraphVertexInputFormat;
-import org.apache.tinkerpop.gremlin.giraph.structure.io.GiraphVertexOutputFormat;
-import org.apache.tinkerpop.gremlin.hadoop.Constants;
-import org.apache.tinkerpop.gremlin.hadoop.process.computer.AbstractHadoopGraphComputer;
-import org.apache.tinkerpop.gremlin.hadoop.process.computer.util.ComputerSubmissionHelper;
-import org.apache.tinkerpop.gremlin.hadoop.process.computer.util.MapReduceHelper;
-import org.apache.tinkerpop.gremlin.hadoop.structure.HadoopGraph;
-import org.apache.tinkerpop.gremlin.hadoop.structure.io.FileSystemStorage;
-import org.apache.tinkerpop.gremlin.hadoop.structure.io.GraphFilterAware;
-import org.apache.tinkerpop.gremlin.hadoop.structure.io.HadoopPoolShimService;
-import org.apache.tinkerpop.gremlin.hadoop.structure.io.InputOutputHelper;
-import org.apache.tinkerpop.gremlin.hadoop.structure.io.ObjectWritable;
-import org.apache.tinkerpop.gremlin.hadoop.structure.io.ObjectWritableIterator;
-import org.apache.tinkerpop.gremlin.hadoop.structure.io.VertexWritable;
-import org.apache.tinkerpop.gremlin.hadoop.structure.util.ConfUtil;
-import org.apache.tinkerpop.gremlin.process.computer.ComputerResult;
-import org.apache.tinkerpop.gremlin.process.computer.GraphComputer;
-import org.apache.tinkerpop.gremlin.process.computer.MapReduce;
-import org.apache.tinkerpop.gremlin.process.computer.MemoryComputeKey;
-import org.apache.tinkerpop.gremlin.process.computer.VertexProgram;
-import org.apache.tinkerpop.gremlin.process.computer.util.DefaultComputerResult;
-import org.apache.tinkerpop.gremlin.process.computer.util.MapMemory;
-import org.apache.tinkerpop.gremlin.structure.io.Storage;
-import org.apache.tinkerpop.gremlin.structure.io.gryo.kryoshim.KryoShimServiceLoader;
-import org.apache.tinkerpop.gremlin.util.Gremlin;
-import org.apache.tinkerpop.gremlin.util.iterator.IteratorUtils;
-
-import java.io.File;
-import java.io.IOException;
-import java.io.NotSerializableException;
-import java.net.URI;
-import java.util.HashSet;
-import java.util.Set;
-import java.util.concurrent.CompletableFuture;
-import java.util.concurrent.Executor;
-import java.util.concurrent.Future;
-
-/**
- * @author Marko A. Rodriguez (http://markorodriguez.com)
- */
-public final class GiraphGraphComputer extends AbstractHadoopGraphComputer implements GraphComputer, Tool {
-
-    protected GiraphConfiguration giraphConfiguration = new GiraphConfiguration();
-    private MapMemory memory = new MapMemory();
-    private boolean useWorkerThreadsInConfiguration;
-    private Set<String> vertexProgramConfigurationKeys = new HashSet<>();
-
-    public GiraphGraphComputer(final HadoopGraph hadoopGraph) {
-        super(hadoopGraph);
-        final Configuration configuration = hadoopGraph.configuration();
-        configuration.getKeys().forEachRemaining(key -> this.giraphConfiguration.set(key, configuration.getProperty(key).toString()));
-        this.giraphConfiguration.setMasterComputeClass(GiraphMemory.class);
-        this.giraphConfiguration.setVertexClass(GiraphVertex.class);
-        this.giraphConfiguration.setComputationClass(GiraphComputation.class);
-        this.giraphConfiguration.setWorkerContextClass(GiraphWorkerContext.class);
-        this.giraphConfiguration.setOutEdgesClass(EmptyOutEdges.class);
-        this.giraphConfiguration.setClass(GiraphConstants.VERTEX_ID_CLASS.getKey(), ObjectWritable.class, ObjectWritable.class);
-        this.giraphConfiguration.setClass(GiraphConstants.VERTEX_VALUE_CLASS.getKey(), VertexWritable.class, VertexWritable.class);
-        this.giraphConfiguration.setBoolean(GiraphConstants.STATIC_GRAPH.getKey(), true);
-        this.giraphConfiguration.setVertexInputFormatClass(GiraphVertexInputFormat.class);
-        this.giraphConfiguration.setVertexOutputFormatClass(GiraphVertexOutputFormat.class);
-        this.useWorkerThreadsInConfiguration = this.giraphConfiguration.getInt(GiraphConstants.MAX_WORKERS, -666) != -666 || this.giraphConfiguration.getInt(GiraphConstants.NUM_COMPUTE_THREADS.getKey(), -666) != -666;
-    }
-
-    @Override
-    public GraphComputer workers(final int workers) {
-        this.useWorkerThreadsInConfiguration = false;
-        return super.workers(workers);
-    }
-
-    @Override
-    public GraphComputer configure(final String key, final Object value) {
-        this.giraphConfiguration.set(key, value.toString());
-        this.useWorkerThreadsInConfiguration = this.giraphConfiguration.getInt(GiraphConstants.MAX_WORKERS, -666) != -666 || this.giraphConfiguration.getInt(GiraphConstants.NUM_COMPUTE_THREADS.getKey(), -666) != -666;
-        return this;
-    }
-
-    @Override
-    public GraphComputer program(final VertexProgram vertexProgram) {
-        super.program(vertexProgram);
-        this.memory.addVertexProgramMemoryComputeKeys(this.vertexProgram);
-        final BaseConfiguration apacheConfiguration = new BaseConfiguration();
-        apacheConfiguration.setDelimiterParsingDisabled(true);
-        vertexProgram.storeState(apacheConfiguration);
-        IteratorUtils.fill(apacheConfiguration.getKeys(), this.vertexProgramConfigurationKeys);
-        ConfUtil.mergeApacheIntoHadoopConfiguration(apacheConfiguration, this.giraphConfiguration);
-        this.vertexProgram.getMessageCombiner().ifPresent(combiner -> this.giraphConfiguration.setMessageCombinerClass(GiraphMessageCombiner.class));
-        return this;
-    }
-
-    @Override
-    public Future<ComputerResult> submit() {
-        super.validateStatePriorToExecution();
-        return ComputerSubmissionHelper.runWithBackgroundThread(this::submitWithExecutor, "GiraphSubmitter");
-    }
-
-    private Future<ComputerResult> submitWithExecutor(final Executor exec) {
-        final long startTime = System.currentTimeMillis();
-        final Configuration apacheConfiguration = ConfUtil.makeApacheConfiguration(this.giraphConfiguration);
-        return CompletableFuture.<ComputerResult>supplyAsync(() -> {
-            try {
-                this.loadJars(giraphConfiguration);
-                ToolRunner.run(this, new String[]{});
-            } catch (final Exception e) {
-                //e.printStackTrace();
-                throw new IllegalStateException(e.getMessage(), e);
-            }
-            this.memory.setRuntime(System.currentTimeMillis() - startTime);
-            // clear properties that should not be propagated in an OLAP chain
-            apacheConfiguration.clearProperty(Constants.GREMLIN_HADOOP_GRAPH_FILTER);
-            apacheConfiguration.clearProperty(Constants.GREMLIN_HADOOP_VERTEX_PROGRAM_INTERCEPTOR);
-            this.vertexProgramConfigurationKeys.forEach(apacheConfiguration::clearProperty); // clear out vertex program specific configurations
-            return new DefaultComputerResult(InputOutputHelper.getOutputGraph(apacheConfiguration, this.resultGraph, this.persist), this.memory.asImmutable());
-        }, exec);
-    }
-
-    @Override
-    public int run(final String[] args) {
-        final Storage storage = FileSystemStorage.open(this.giraphConfiguration);
-        storage.rm(this.giraphConfiguration.get(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION));
-        this.giraphConfiguration.setBoolean(Constants.GREMLIN_HADOOP_GRAPH_WRITER_HAS_EDGES, this.persist.equals(Persist.EDGES));
-        try {
-            // store vertex and edge filters (will propagate down to native InputFormat or else GiraphVertexInputFormat will process)
-            final BaseConfiguration apacheConfiguration = new BaseConfiguration();
-            apacheConfiguration.setDelimiterParsingDisabled(true);
-            GraphFilterAware.storeGraphFilter(apacheConfiguration, this.giraphConfiguration, this.graphFilter);
-
-            // it is possible to run graph computer without a vertex program (and thus, only map reduce jobs if they exist)
-            if (null != this.vertexProgram) {
-                // a way to verify in Giraph whether the traversal will go over the wire or not
-                try {
-                    VertexProgram.createVertexProgram(this.hadoopGraph, ConfUtil.makeApacheConfiguration(this.giraphConfiguration));
-                } catch (final IllegalStateException e) {
-                    if (e.getCause() instanceof NumberFormatException)
-                        throw new NotSerializableException("The provided traversal is not serializable and thus, can not be distributed across the cluster");
-                }
-                // remove historic combiners in configuration propagation (this occurs when job chaining)
-                if (!this.vertexProgram.getMessageCombiner().isPresent())
-                    this.giraphConfiguration.unset(GiraphConstants.MESSAGE_COMBINER_CLASS.getKey());
-                // split required workers across system (open map slots + max threads per machine = total amount of TinkerPop workers)
-                if (!this.useWorkerThreadsInConfiguration) {
-                    final Cluster cluster = new Cluster(GiraphGraphComputer.this.giraphConfiguration);
-                    int totalMappers = cluster.getClusterStatus().getMapSlotCapacity() - 1; // 1 is needed for master
-                    cluster.close();
-                    if (this.workers <= totalMappers) {
-                        this.giraphConfiguration.setWorkerConfiguration(this.workers, this.workers, 100.0F);
-                        this.giraphConfiguration.setNumComputeThreads(1);
-                    } else {
-                        if (totalMappers == 0) totalMappers = 1; // happens in local mode
-                        int threadsPerMapper = Long.valueOf(Math.round((double) this.workers / (double) totalMappers)).intValue(); // TODO: need to find least common denominator
-                        this.giraphConfiguration.setWorkerConfiguration(totalMappers, totalMappers, 100.0F);
-                        this.giraphConfiguration.setNumComputeThreads(threadsPerMapper);
-                    }
-                }
-                // prepare the giraph vertex-centric computing job
-                final GiraphJob job = new GiraphJob(this.giraphConfiguration, Constants.GREMLIN_HADOOP_GIRAPH_JOB_PREFIX + this.vertexProgram);
-                job.getInternalJob().setJarByClass(GiraphGraphComputer.class);
-                this.logger.info(Constants.GREMLIN_HADOOP_GIRAPH_JOB_PREFIX + this.vertexProgram);
-                // handle input paths (if any)
-                String inputLocation = this.giraphConfiguration.get(Constants.GREMLIN_HADOOP_INPUT_LOCATION, null);
-                if (null != inputLocation && FileInputFormat.class.isAssignableFrom(this.giraphConfiguration.getClass(Constants.GREMLIN_HADOOP_GRAPH_READER, InputFormat.class))) {
-                    inputLocation = Constants.getSearchGraphLocation(inputLocation, storage).orElse(this.giraphConfiguration.get(Constants.GREMLIN_HADOOP_INPUT_LOCATION));
-                    FileInputFormat.setInputPaths(job.getInternalJob(), new Path(inputLocation));
-                }
-                // handle output paths (if any)
-                String outputLocation = this.giraphConfiguration.get(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION, null);
-                if (null != outputLocation && FileOutputFormat.class.isAssignableFrom(this.giraphConfiguration.getClass(Constants.GREMLIN_HADOOP_GRAPH_WRITER, OutputFormat.class))) {
-                    outputLocation = Constants.getGraphLocation(this.giraphConfiguration.get(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION));
-                    FileOutputFormat.setOutputPath(job.getInternalJob(), new Path(outputLocation));
-                }
-                // execute the job and wait until it completes (if it fails, throw an exception)
-                if (!job.run(true))
-                    throw new IllegalStateException("The GiraphGraphComputer job failed -- aborting all subsequent MapReduce jobs: " + job.getInternalJob().getStatus().getFailureInfo());
-                // add vertex program memory values to the return memory
-                for (final MemoryComputeKey memoryComputeKey : this.vertexProgram.getMemoryComputeKeys()) {
-                    if (!memoryComputeKey.isTransient() && storage.exists(Constants.getMemoryLocation(this.giraphConfiguration.get(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION), memoryComputeKey.getKey()))) {
-                        final ObjectWritableIterator iterator = new ObjectWritableIterator(this.giraphConfiguration, new Path(Constants.getMemoryLocation(this.giraphConfiguration.get(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION), memoryComputeKey.getKey())));
-                        if (iterator.hasNext()) {
-                            this.memory.set(memoryComputeKey.getKey(), iterator.next().getValue());
-                        }
-                        // vertex program memory items are not stored on disk
-                        storage.rm(Constants.getMemoryLocation(this.giraphConfiguration.get(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION), memoryComputeKey.getKey()));
-                    }
-                }
-                final Path path = new Path(Constants.getMemoryLocation(this.giraphConfiguration.get(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION), Constants.HIDDEN_ITERATION));
-                this.memory.setIteration((Integer) new ObjectWritableIterator(this.giraphConfiguration, path).next().getValue());
-                storage.rm(Constants.getMemoryLocation(this.giraphConfiguration.get(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION), Constants.HIDDEN_ITERATION));
-            }
-            // do map reduce jobs
-            this.giraphConfiguration.setBoolean(Constants.GREMLIN_HADOOP_GRAPH_READER_HAS_EDGES, this.giraphConfiguration.getBoolean(Constants.GREMLIN_HADOOP_GRAPH_WRITER_HAS_EDGES, true));
-            for (final MapReduce mapReduce : this.mapReducers) {
-                this.memory.addMapReduceMemoryKey(mapReduce);
-                MapReduceHelper.executeMapReduceJob(mapReduce, this.memory, this.giraphConfiguration);
-            }
-
-            // if no persistence, delete the graph and memory output
-            if (this.persist.equals(Persist.NOTHING))
-                storage.rm(this.giraphConfiguration.get(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION));
-        } catch (final Exception e) {
-            throw new IllegalStateException(e.getMessage(), e);
-        }
-        return 0;
-    }
-
-    @Override
-    public void setConf(final org.apache.hadoop.conf.Configuration configuration) {
-        // TODO: is this necessary to implement?
-    }
-
-    @Override
-    public org.apache.hadoop.conf.Configuration getConf() {
-        return this.giraphConfiguration;
-    }
-
-    @Override
-    protected void loadJar(final org.apache.hadoop.conf.Configuration hadoopConfiguration, final File file, final Object... params)
-            throws IOException {
-        final FileSystem defaultFileSystem = FileSystem.get(hadoopConfiguration);
-        try {
-            final Path jarFile = new Path(defaultFileSystem.getHomeDirectory() + "/hadoop-gremlin-" + Gremlin.version() + "-libs/" + file.getName());
-            if (!defaultFileSystem.exists(jarFile)) {
-                final Path sourcePath = new Path(file.getPath());
-                final URI sourceUri = sourcePath.toUri();
-                final FileSystem fs = FileSystem.get(sourceUri, hadoopConfiguration);
-                fs.copyFromLocalFile(sourcePath, jarFile);
-            }
-            try {
-                DistributedCache.addArchiveToClassPath(jarFile, this.giraphConfiguration, defaultFileSystem);
-            } catch (final Exception e) {
-                throw new RuntimeException(e.getMessage(), e);
-            }
-        } catch (final Exception e) {
-            throw new IllegalStateException(e.getMessage(), e);
-        }
-    }
-
-    public static void main(final String[] args) throws Exception {
-        final FileConfiguration configuration = new PropertiesConfiguration(args[0]);
-        new GiraphGraphComputer(HadoopGraph.open(configuration)).program(VertexProgram.createVertexProgram(HadoopGraph.open(configuration), configuration)).submit().get();
-    }
-
-    public Features features() {
-        return new Features();
-    }
-
-    public class Features extends AbstractHadoopGraphComputer.Features {
-
-        @Override
-        public int getMaxWorkers() {
-            if (GiraphGraphComputer.this.giraphConfiguration.getLocalTestMode())
-                return Runtime.getRuntime().availableProcessors();
-            else {
-                return Integer.MAX_VALUE;
-                /*try {
-                    final Cluster cluster = new Cluster(GiraphGraphComputer.this.giraphConfiguration);
-                    int maxWorkers = (cluster.getClusterStatus().getMapSlotCapacity() - 1) * 16; // max 16 threads per machine hardcoded :|
-                    cluster.close();
-                    return maxWorkers;
-
-                } catch (final IOException | InterruptedException e) {
-                    throw new IllegalStateException(e.getMessage(), e);
-                }*/
-            }
-        }
-    }
-}
