http://git-wip-us.apache.org/repos/asf/spark-website/blob/24d32b75/site/docs/1.6.3/api/java/org/apache/spark/graphx/Graph.html ---------------------------------------------------------------------- diff --git a/site/docs/1.6.3/api/java/org/apache/spark/graphx/Graph.html b/site/docs/1.6.3/api/java/org/apache/spark/graphx/Graph.html new file mode 100644 index 0000000..7213e80 --- /dev/null +++ b/site/docs/1.6.3/api/java/org/apache/spark/graphx/Graph.html @@ -0,0 +1,983 @@ +<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> +<!-- NewPage --> +<html lang="en"> +<head> +<!-- Generated by javadoc (version 1.7.0_79) on Wed Nov 02 15:16:13 PDT 2016 --> +<title>Graph (Spark 1.6.3 JavaDoc)</title> +<meta name="date" content="2016-11-02"> +<link rel="stylesheet" type="text/css" href="../../../../stylesheet.css" title="Style"> +</head> +<body> +<script type="text/javascript"><!-- + if (location.href.indexOf('is-external=true') == -1) { + parent.document.title="Graph (Spark 1.6.3 JavaDoc)"; + } +//--> +</script> +<noscript> +<div>JavaScript is disabled on your browser.</div> +</noscript> +<!-- ========= START OF TOP NAVBAR ======= --> +<div class="topNav"><a name="navbar_top"> +<!-- --> +</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow"> +<!-- --> +</a> +<ul class="navList" title="Navigation"> +<li><a href="../../../../overview-summary.html">Overview</a></li> +<li><a href="package-summary.html">Package</a></li> +<li class="navBarCell1Rev">Class</li> +<li><a href="package-tree.html">Tree</a></li> +<li><a href="../../../../deprecated-list.html">Deprecated</a></li> +<li><a href="../../../../index-all.html">Index</a></li> +<li><a href="../../../../help-doc.html">Help</a></li> +</ul> +</div> +<div class="subNav"> +<ul class="navList"> +<li><a href="../../../../org/apache/spark/graphx/EdgeTriplet.html" title="class in org.apache.spark.graphx"><span class="strong">Prev 
Class</span></a></li> +<li><a href="../../../../org/apache/spark/graphx/GraphKryoRegistrator.html" title="class in org.apache.spark.graphx"><span class="strong">Next Class</span></a></li> +</ul> +<ul class="navList"> +<li><a href="../../../../index.html?org/apache/spark/graphx/Graph.html" target="_top">Frames</a></li> +<li><a href="Graph.html" target="_top">No Frames</a></li> +</ul> +<ul class="navList" id="allclasses_navbar_top"> +<li><a href="../../../../allclasses-noframe.html">All Classes</a></li> +</ul> +<div> +<script type="text/javascript"><!-- + allClassesLink = document.getElementById("allclasses_navbar_top"); + if(window==top) { + allClassesLink.style.display = "block"; + } + else { + allClassesLink.style.display = "none"; + } + //--> +</script> +</div> +<div> +<ul class="subNavList"> +<li>Summary: </li> +<li>Nested | </li> +<li>Field | </li> +<li>Constr | </li> +<li><a href="#method_summary">Method</a></li> +</ul> +<ul class="subNavList"> +<li>Detail: </li> +<li>Field | </li> +<li>Constr | </li> +<li><a href="#method_detail">Method</a></li> +</ul> +</div> +<a name="skip-navbar_top"> +<!-- --> +</a></div> +<!-- ========= END OF TOP NAVBAR ========= --> +<!-- ======== START OF CLASS DATA ======== --> +<div class="header"> +<div class="subTitle">org.apache.spark.graphx</div> +<h2 title="Class Graph" class="title">Class Graph<VD,ED></h2> +</div> +<div class="contentContainer"> +<ul class="inheritance"> +<li>Object</li> +<li> +<ul class="inheritance"> +<li>org.apache.spark.graphx.Graph<VD,ED></li> +</ul> +</li> +</ul> +<div class="description"> +<ul class="blockList"> +<li class="blockList"> +<dl> +<dt>All Implemented Interfaces:</dt> +<dd>java.io.Serializable</dd> +</dl> +<dl> +<dt>Direct Known Subclasses:</dt> +<dd><a href="../../../../org/apache/spark/graphx/impl/GraphImpl.html" title="class in org.apache.spark.graphx.impl">GraphImpl</a></dd> +</dl> +<hr> +<br> +<pre>public abstract class <span class="strong">Graph<VD,ED></span> +extends Object +implements 
scala.Serializable</pre> +<div class="block">The Graph abstractly represents a graph with arbitrary objects + associated with vertices and edges. The graph provides basic + operations to access and manipulate the data associated with + vertices and edges as well as the underlying structure. Like Spark + RDDs, the graph is a functional data-structure in which mutating + operations return new graphs. + <p></div> +<dl><dt><span class="strong">See Also:</span></dt><dd><a href="../../../../serialized-form.html#org.apache.spark.graphx.Graph">Serialized Form</a></dd></dl> +</li> +</ul> +</div> +<div class="summary"> +<ul class="blockList"> +<li class="blockList"> +<!-- ========== METHOD SUMMARY =========== --> +<ul class="blockList"> +<li class="blockList"><a name="method_summary"> +<!-- --> +</a> +<h3>Method Summary</h3> +<table class="overviewSummary" border="0" cellpadding="3" cellspacing="0" summary="Method Summary table, listing methods, and an explanation"> +<caption><span>Methods</span><span class="tabEnd"> </span></caption> +<tr> +<th class="colFirst" scope="col">Modifier and Type</th> +<th class="colLast" scope="col">Method and Description</th> +</tr> +<tr class="altColor"> +<td class="colFirst"><code><A> <a href="../../../../org/apache/spark/graphx/VertexRDD.html" title="class in org.apache.spark.graphx">VertexRDD</a><A></code></td> +<td class="colLast"><code><strong><a href="../../../../org/apache/spark/graphx/Graph.html#aggregateMessages(scala.Function1,%20scala.Function2,%20org.apache.spark.graphx.TripletFields,%20scala.reflect.ClassTag)">aggregateMessages</a></strong>(scala.Function1<<a href="../../../../org/apache/spark/graphx/EdgeContext.html" title="class in org.apache.spark.graphx">EdgeContext</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>,A>,scala.runtime.BoxedUnit> sendMsg, + scala.Function2<A,A,A> 
mergeMsg, + <a href="../../../../org/apache/spark/graphx/TripletFields.html" title="class in org.apache.spark.graphx">TripletFields</a> tripletFields, + scala.reflect.ClassTag<A> evidence$12)</code> +<div class="block">Aggregates values from the neighboring edges and vertices of each vertex.</div> +</td> +</tr> +<tr class="rowColor"> +<td class="colFirst"><code>static <VD,ED> <a href="../../../../org/apache/spark/graphx/Graph.html" title="class in org.apache.spark.graphx">Graph</a><VD,ED></code></td> +<td class="colLast"><code><strong><a href="../../../../org/apache/spark/graphx/Graph.html#apply(org.apache.spark.rdd.RDD,%20org.apache.spark.rdd.RDD,%20VD,%20org.apache.spark.storage.StorageLevel,%20org.apache.spark.storage.StorageLevel,%20scala.reflect.ClassTag,%20scala.reflect.ClassTag)">apply</a></strong>(<a href="../../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a><scala.Tuple2<Object,VD>> vertices, + <a href="../../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a><<a href="../../../../org/apache/spark/graphx/Edge.html" title="class in org.apache.spark.graphx">Edge</a><ED>> edges, + VD defaultVertexAttr, + <a href="../../../../org/apache/spark/storage/StorageLevel.html" title="class in org.apache.spark.storage">StorageLevel</a> edgeStorageLevel, + <a href="../../../../org/apache/spark/storage/StorageLevel.html" title="class in org.apache.spark.storage">StorageLevel</a> vertexStorageLevel, + scala.reflect.ClassTag<VD> evidence$19, + scala.reflect.ClassTag<ED> evidence$20)</code> +<div class="block">Construct a graph from a collection of vertices and + edges with attributes.</div> +</td> +</tr> +<tr class="altColor"> +<td class="colFirst"><code>abstract <a href="../../../../org/apache/spark/graphx/Graph.html" title="class in org.apache.spark.graphx">Graph</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,<a 
href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>></code></td> +<td class="colLast"><code><strong><a href="../../../../org/apache/spark/graphx/Graph.html#cache()">cache</a></strong>()</code> +<div class="block">Caches the vertices and edges associated with this graph at the previously-specified target + storage levels, which default to <code>MEMORY_ONLY</code>.</div> +</td> +</tr> +<tr class="rowColor"> +<td class="colFirst"><code>abstract void</code></td> +<td class="colLast"><code><strong><a href="../../../../org/apache/spark/graphx/Graph.html#checkpoint()">checkpoint</a></strong>()</code> +<div class="block">Mark this Graph for checkpointing.</div> +</td> +</tr> +<tr class="altColor"> +<td class="colFirst"><code>abstract <a href="../../../../org/apache/spark/graphx/EdgeRDD.html" title="class in org.apache.spark.graphx">EdgeRDD</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>></code></td> +<td class="colLast"><code><strong><a href="../../../../org/apache/spark/graphx/Graph.html#edges()">edges</a></strong>()</code> +<div class="block">An RDD containing the edges and their associated attributes.</div> +</td> +</tr> +<tr class="rowColor"> +<td class="colFirst"><code>static <VD,ED> <a href="../../../../org/apache/spark/graphx/Graph.html" title="class in org.apache.spark.graphx">Graph</a><VD,ED></code></td> +<td class="colLast"><code><strong><a href="../../../../org/apache/spark/graphx/Graph.html#fromEdges(org.apache.spark.rdd.RDD,%20VD,%20org.apache.spark.storage.StorageLevel,%20org.apache.spark.storage.StorageLevel,%20scala.reflect.ClassTag,%20scala.reflect.ClassTag)">fromEdges</a></strong>(<a href="../../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a><<a href="../../../../org/apache/spark/graphx/Edge.html" title="class in org.apache.spark.graphx">Edge</a><ED>> edges, + VD defaultValue, + <a 
href="../../../../org/apache/spark/storage/StorageLevel.html" title="class in org.apache.spark.storage">StorageLevel</a> edgeStorageLevel, + <a href="../../../../org/apache/spark/storage/StorageLevel.html" title="class in org.apache.spark.storage">StorageLevel</a> vertexStorageLevel, + scala.reflect.ClassTag<VD> evidence$17, + scala.reflect.ClassTag<ED> evidence$18)</code> +<div class="block">Construct a graph from a collection of edges.</div> +</td> +</tr> +<tr class="altColor"> +<td class="colFirst"><code>static <VD> <a href="../../../../org/apache/spark/graphx/Graph.html" title="class in org.apache.spark.graphx">Graph</a><VD,Object></code></td> +<td class="colLast"><code><strong><a href="../../../../org/apache/spark/graphx/Graph.html#fromEdgeTuples(org.apache.spark.rdd.RDD,%20VD,%20scala.Option,%20org.apache.spark.storage.StorageLevel,%20org.apache.spark.storage.StorageLevel,%20scala.reflect.ClassTag)">fromEdgeTuples</a></strong>(<a href="../../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a><scala.Tuple2<Object,Object>> rawEdges, + VD defaultValue, + scala.Option<<a href="../../../../org/apache/spark/graphx/PartitionStrategy.html" title="interface in org.apache.spark.graphx">PartitionStrategy</a>> uniqueEdges, + <a href="../../../../org/apache/spark/storage/StorageLevel.html" title="class in org.apache.spark.storage">StorageLevel</a> edgeStorageLevel, + <a href="../../../../org/apache/spark/storage/StorageLevel.html" title="class in org.apache.spark.storage">StorageLevel</a> vertexStorageLevel, + scala.reflect.ClassTag<VD> evidence$16)</code> +<div class="block">Construct a graph from a collection of edges encoded as vertex id pairs.</div> +</td> +</tr> +<tr class="rowColor"> +<td class="colFirst"><code>abstract scala.collection.Seq<String></code></td> +<td class="colLast"><code><strong><a href="../../../../org/apache/spark/graphx/Graph.html#getCheckpointFiles()">getCheckpointFiles</a></strong>()</code> +<div class="block">Gets 
the name of the files to which this Graph was checkpointed.</div> +</td> +</tr> +<tr class="altColor"> +<td class="colFirst"><code>static <VD,ED> <a href="../../../../org/apache/spark/graphx/GraphOps.html" title="class in org.apache.spark.graphx">GraphOps</a><VD,ED></code></td> +<td class="colLast"><code><strong><a href="../../../../org/apache/spark/graphx/Graph.html#graphToGraphOps(org.apache.spark.graphx.Graph,%20scala.reflect.ClassTag,%20scala.reflect.ClassTag)">graphToGraphOps</a></strong>(<a href="../../../../org/apache/spark/graphx/Graph.html" title="class in org.apache.spark.graphx">Graph</a><VD,ED> g, + scala.reflect.ClassTag<VD> evidence$21, + scala.reflect.ClassTag<ED> evidence$22)</code> +<div class="block">Implicitly extracts the <a href="../../../../org/apache/spark/graphx/GraphOps.html" title="class in org.apache.spark.graphx"><code>GraphOps</code></a> member from a graph.</div> +</td> +</tr> +<tr class="rowColor"> +<td class="colFirst"><code>abstract <a href="../../../../org/apache/spark/graphx/Graph.html" title="class in org.apache.spark.graphx">Graph</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>></code></td> +<td class="colLast"><code><strong><a href="../../../../org/apache/spark/graphx/Graph.html#groupEdges(scala.Function2)">groupEdges</a></strong>(scala.Function2<<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>> merge)</code> +<div class="block">Merges multiple edges between two vertices into a single edge.</div> +</td> +</tr> +<tr class="altColor"> +<td class="colFirst"><code>abstract boolean</code></td> +<td class="colLast"><code><strong><a 
href="../../../../org/apache/spark/graphx/Graph.html#isCheckpointed()">isCheckpointed</a></strong>()</code> +<div class="block">Return whether this Graph has been checkpointed or not.</div> +</td> +</tr> +<tr class="rowColor"> +<td class="colFirst"><code><ED2> <a href="../../../../org/apache/spark/graphx/Graph.html" title="class in org.apache.spark.graphx">Graph</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,ED2></code></td> +<td class="colLast"><code><strong><a href="../../../../org/apache/spark/graphx/Graph.html#mapEdges(scala.Function1,%20scala.reflect.ClassTag)">mapEdges</a></strong>(scala.Function1<<a href="../../../../org/apache/spark/graphx/Edge.html" title="class in org.apache.spark.graphx">Edge</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>>,ED2> map, + scala.reflect.ClassTag<ED2> evidence$4)</code> +<div class="block">Transforms each edge attribute in the graph using the map function.</div> +</td> +</tr> +<tr class="altColor"> +<td class="colFirst"><code>abstract <ED2> <a href="../../../../org/apache/spark/graphx/Graph.html" title="class in org.apache.spark.graphx">Graph</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,ED2></code></td> +<td class="colLast"><code><strong><a href="../../../../org/apache/spark/graphx/Graph.html#mapEdges(scala.Function2,%20scala.reflect.ClassTag)">mapEdges</a></strong>(scala.Function2<Object,scala.collection.Iterator<<a href="../../../../org/apache/spark/graphx/Edge.html" title="class in org.apache.spark.graphx">Edge</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>>>,scala.collection.Iterator<ED2>> map, + scala.reflect.ClassTag<ED2> evidence$5)</code> +<div class="block">Transforms each edge attribute using the map function, passing it a whole partition at a + time.</div> +</td> +</tr> +<tr class="rowColor"> +<td 
class="colFirst"><code>abstract <A> <a href="../../../../org/apache/spark/graphx/VertexRDD.html" title="class in org.apache.spark.graphx">VertexRDD</a><A></code></td> +<td class="colLast"><code><strong><a href="../../../../org/apache/spark/graphx/Graph.html#mapReduceTriplets(scala.Function1,%20scala.Function2,%20scala.Option,%20scala.reflect.ClassTag)">mapReduceTriplets</a></strong>(scala.Function1<<a href="../../../../org/apache/spark/graphx/EdgeTriplet.html" title="class in org.apache.spark.graphx">EdgeTriplet</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>>,scala.collection.Iterator<scala.Tuple2<Object,A>>> mapFunc, + scala.Function2<A,A,A> reduceFunc, + scala.Option<scala.Tuple2<<a href="../../../../org/apache/spark/graphx/VertexRDD.html" title="class in org.apache.spark.graphx">VertexRDD</a><?>,<a href="../../../../org/apache/spark/graphx/EdgeDirection.html" title="class in org.apache.spark.graphx">EdgeDirection</a>>> activeSetOpt, + scala.reflect.ClassTag<A> evidence$11)</code> +<div class="block">Aggregates values from the neighboring edges and vertices of each vertex.</div> +</td> +</tr> +<tr class="altColor"> +<td class="colFirst"><code><ED2> <a href="../../../../org/apache/spark/graphx/Graph.html" title="class in org.apache.spark.graphx">Graph</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,ED2></code></td> +<td class="colLast"><code><strong><a href="../../../../org/apache/spark/graphx/Graph.html#mapTriplets(scala.Function1,%20scala.reflect.ClassTag)">mapTriplets</a></strong>(scala.Function1<<a href="../../../../org/apache/spark/graphx/EdgeTriplet.html" title="class in org.apache.spark.graphx">EdgeTriplet</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,<a href="../../../../org/apache/spark/graphx/Graph.html" 
title="type parameter in Graph">ED</a>>,ED2> map, + scala.reflect.ClassTag<ED2> evidence$6)</code> +<div class="block">Transforms each edge attribute using the map function, passing it the adjacent vertex + attributes as well.</div> +</td> +</tr> +<tr class="rowColor"> +<td class="colFirst"><code><ED2> <a href="../../../../org/apache/spark/graphx/Graph.html" title="class in org.apache.spark.graphx">Graph</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,ED2></code></td> +<td class="colLast"><code><strong><a href="../../../../org/apache/spark/graphx/Graph.html#mapTriplets(scala.Function1,%20org.apache.spark.graphx.TripletFields,%20scala.reflect.ClassTag)">mapTriplets</a></strong>(scala.Function1<<a href="../../../../org/apache/spark/graphx/EdgeTriplet.html" title="class in org.apache.spark.graphx">EdgeTriplet</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>>,ED2> map, + <a href="../../../../org/apache/spark/graphx/TripletFields.html" title="class in org.apache.spark.graphx">TripletFields</a> tripletFields, + scala.reflect.ClassTag<ED2> evidence$7)</code> +<div class="block">Transforms each edge attribute using the map function, passing it the adjacent vertex + attributes as well.</div> +</td> +</tr> +<tr class="altColor"> +<td class="colFirst"><code>abstract <ED2> <a href="../../../../org/apache/spark/graphx/Graph.html" title="class in org.apache.spark.graphx">Graph</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,ED2></code></td> +<td class="colLast"><code><strong><a href="../../../../org/apache/spark/graphx/Graph.html#mapTriplets(scala.Function2,%20org.apache.spark.graphx.TripletFields,%20scala.reflect.ClassTag)">mapTriplets</a></strong>(scala.Function2<Object,scala.collection.Iterator<<a 
href="../../../../org/apache/spark/graphx/EdgeTriplet.html" title="class in org.apache.spark.graphx">EdgeTriplet</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>>>,scala.collection.Iterator<ED2>> map, + <a href="../../../../org/apache/spark/graphx/TripletFields.html" title="class in org.apache.spark.graphx">TripletFields</a> tripletFields, + scala.reflect.ClassTag<ED2> evidence$8)</code> +<div class="block">Transforms each edge attribute a partition at a time using the map function, passing it the + adjacent vertex attributes as well.</div> +</td> +</tr> +<tr class="rowColor"> +<td class="colFirst"><code>abstract <VD2> <a href="../../../../org/apache/spark/graphx/Graph.html" title="class in org.apache.spark.graphx">Graph</a><VD2,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>></code></td> +<td class="colLast"><code><strong><a href="../../../../org/apache/spark/graphx/Graph.html#mapVertices(scala.Function2,%20scala.reflect.ClassTag,%20scala.Predef.$eq$colon$eq)">mapVertices</a></strong>(scala.Function2<Object,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,VD2> map, + scala.reflect.ClassTag<VD2> evidence$3, + scala.Predef.$eq$colon$eq<<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,VD2> eq)</code> +<div class="block">Transforms each vertex attribute in the graph using the map function.</div> +</td> +</tr> +<tr class="altColor"> +<td class="colFirst"><code>abstract <VD2,ED2> <a href="../../../../org/apache/spark/graphx/Graph.html" title="class in org.apache.spark.graphx">Graph</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>></code></td> 
+<td class="colLast"><code><strong><a href="../../../../org/apache/spark/graphx/Graph.html#mask(org.apache.spark.graphx.Graph,%20scala.reflect.ClassTag,%20scala.reflect.ClassTag)">mask</a></strong>(<a href="../../../../org/apache/spark/graphx/Graph.html" title="class in org.apache.spark.graphx">Graph</a><VD2,ED2> other, + scala.reflect.ClassTag<VD2> evidence$9, + scala.reflect.ClassTag<ED2> evidence$10)</code> +<div class="block">Restricts the graph to only the vertices and edges that are also in <code>other</code>, but keeps the + attributes from this graph.</div> +</td> +</tr> +<tr class="rowColor"> +<td class="colFirst"><code><a href="../../../../org/apache/spark/graphx/GraphOps.html" title="class in org.apache.spark.graphx">GraphOps</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>></code></td> +<td class="colLast"><code><strong><a href="../../../../org/apache/spark/graphx/Graph.html#ops()">ops</a></strong>()</code> +<div class="block">The associated <a href="../../../../org/apache/spark/graphx/GraphOps.html" title="class in org.apache.spark.graphx"><code>GraphOps</code></a> object.</div> +</td> +</tr> +<tr class="altColor"> +<td class="colFirst"><code>abstract <U,VD2> <a href="../../../../org/apache/spark/graphx/Graph.html" title="class in org.apache.spark.graphx">Graph</a><VD2,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>></code></td> +<td class="colLast"><code><strong><a href="../../../../org/apache/spark/graphx/Graph.html#outerJoinVertices(org.apache.spark.rdd.RDD,%20scala.Function3,%20scala.reflect.ClassTag,%20scala.reflect.ClassTag,%20scala.Predef.$eq$colon$eq)">outerJoinVertices</a></strong>(<a href="../../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a><scala.Tuple2<Object,U>> other, + scala.Function3<Object,<a 
href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,scala.Option<U>,VD2> mapFunc, + scala.reflect.ClassTag<U> evidence$14, + scala.reflect.ClassTag<VD2> evidence$15, + scala.Predef.$eq$colon$eq<<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,VD2> eq)</code> +<div class="block">Joins the vertices with entries in the <code>table</code> RDD and merges the results using <code>mapFunc</code>.</div> +</td> +</tr> +<tr class="rowColor"> +<td class="colFirst"><code>abstract <a href="../../../../org/apache/spark/graphx/Graph.html" title="class in org.apache.spark.graphx">Graph</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>></code></td> +<td class="colLast"><code><strong><a href="../../../../org/apache/spark/graphx/Graph.html#partitionBy(org.apache.spark.graphx.PartitionStrategy)">partitionBy</a></strong>(<a href="../../../../org/apache/spark/graphx/PartitionStrategy.html" title="interface in org.apache.spark.graphx">PartitionStrategy</a> partitionStrategy)</code> +<div class="block">Repartitions the edges in the graph according to <code>partitionStrategy</code>.</div> +</td> +</tr> +<tr class="altColor"> +<td class="colFirst"><code>abstract <a href="../../../../org/apache/spark/graphx/Graph.html" title="class in org.apache.spark.graphx">Graph</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>></code></td> +<td class="colLast"><code><strong><a href="../../../../org/apache/spark/graphx/Graph.html#partitionBy(org.apache.spark.graphx.PartitionStrategy,%20int)">partitionBy</a></strong>(<a href="../../../../org/apache/spark/graphx/PartitionStrategy.html" title="interface in 
org.apache.spark.graphx">PartitionStrategy</a> partitionStrategy, + int numPartitions)</code> +<div class="block">Repartitions the edges in the graph according to <code>partitionStrategy</code>.</div> +</td> +</tr> +<tr class="rowColor"> +<td class="colFirst"><code>abstract <a href="../../../../org/apache/spark/graphx/Graph.html" title="class in org.apache.spark.graphx">Graph</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>></code></td> +<td class="colLast"><code><strong><a href="../../../../org/apache/spark/graphx/Graph.html#persist(org.apache.spark.storage.StorageLevel)">persist</a></strong>(<a href="../../../../org/apache/spark/storage/StorageLevel.html" title="class in org.apache.spark.storage">StorageLevel</a> newLevel)</code> +<div class="block">Caches the vertices and edges associated with this graph at the specified storage level, + ignoring any target storage levels previously set.</div> +</td> +</tr> +<tr class="altColor"> +<td class="colFirst"><code>abstract <a href="../../../../org/apache/spark/graphx/Graph.html" title="class in org.apache.spark.graphx">Graph</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>></code></td> +<td class="colLast"><code><strong><a href="../../../../org/apache/spark/graphx/Graph.html#reverse()">reverse</a></strong>()</code> +<div class="block">Reverses all edges in the graph.</div> +</td> +</tr> +<tr class="rowColor"> +<td class="colFirst"><code>abstract <a href="../../../../org/apache/spark/graphx/Graph.html" title="class in org.apache.spark.graphx">Graph</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in 
Graph">ED</a>></code></td> +<td class="colLast"><code><strong><a href="../../../../org/apache/spark/graphx/Graph.html#subgraph(scala.Function1,%20scala.Function2)">subgraph</a></strong>(scala.Function1<<a href="../../../../org/apache/spark/graphx/EdgeTriplet.html" title="class in org.apache.spark.graphx">EdgeTriplet</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>>,Object> epred, + scala.Function2<Object,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,Object> vpred)</code> +<div class="block">Restricts the graph to only the vertices and edges satisfying the predicates.</div> +</td> +</tr> +<tr class="altColor"> +<td class="colFirst"><code>abstract <a href="../../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a><<a href="../../../../org/apache/spark/graphx/EdgeTriplet.html" title="class in org.apache.spark.graphx">EdgeTriplet</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>>></code></td> +<td class="colLast"><code><strong><a href="../../../../org/apache/spark/graphx/Graph.html#triplets()">triplets</a></strong>()</code> +<div class="block">An RDD containing the edge triplets, which are edges along with the vertex data associated with + the adjacent vertices.</div> +</td> +</tr> +<tr class="rowColor"> +<td class="colFirst"><code>abstract <a href="../../../../org/apache/spark/graphx/Graph.html" title="class in org.apache.spark.graphx">Graph</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>></code></td> +<td class="colLast"><code><strong><a 
href="../../../../org/apache/spark/graphx/Graph.html#unpersist(boolean)">unpersist</a></strong>(boolean blocking)</code> +<div class="block">Uncaches both vertices and edges of this graph.</div> +</td> +</tr> +<tr class="altColor"> +<td class="colFirst"><code>abstract <a href="../../../../org/apache/spark/graphx/Graph.html" title="class in org.apache.spark.graphx">Graph</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>></code></td> +<td class="colLast"><code><strong><a href="../../../../org/apache/spark/graphx/Graph.html#unpersistVertices(boolean)">unpersistVertices</a></strong>(boolean blocking)</code> +<div class="block">Uncaches only the vertices of this graph, leaving the edges alone.</div> +</td> +</tr> +<tr class="rowColor"> +<td class="colFirst"><code>abstract <a href="../../../../org/apache/spark/graphx/VertexRDD.html" title="class in org.apache.spark.graphx">VertexRDD</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>></code></td> +<td class="colLast"><code><strong><a href="../../../../org/apache/spark/graphx/Graph.html#vertices()">vertices</a></strong>()</code> +<div class="block">An RDD containing the vertices and their associated attributes.</div> +</td> +</tr> +</table> +<ul class="blockList"> +<li class="blockList"><a name="methods_inherited_from_class_Object"> +<!-- --> +</a> +<h3>Methods inherited from class Object</h3> +<code>equals, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait</code></li> +</ul> +</li> +</ul> +</li> +</ul> +</div> +<div class="details"> +<ul class="blockList"> +<li class="blockList"> +<!-- ============ METHOD DETAIL ========== --> +<ul class="blockList"> +<li class="blockList"><a name="method_detail"> +<!-- --> +</a> +<h3>Method Detail</h3> +<a 
name="fromEdgeTuples(org.apache.spark.rdd.RDD,java.lang.Object,scala.Option,org.apache.spark.storage.StorageLevel,org.apache.spark.storage.StorageLevel,scala.reflect.ClassTag)"> +<!-- --> +</a><a name="fromEdgeTuples(org.apache.spark.rdd.RDD, VD, scala.Option, org.apache.spark.storage.StorageLevel, org.apache.spark.storage.StorageLevel, scala.reflect.ClassTag)"> +<!-- --> +</a> +<ul class="blockList"> +<li class="blockList"> +<h4>fromEdgeTuples</h4> +<pre>public static <VD> <a href="../../../../org/apache/spark/graphx/Graph.html" title="class in org.apache.spark.graphx">Graph</a><VD,Object> fromEdgeTuples(<a href="../../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a><scala.Tuple2<Object,Object>> rawEdges, + VD defaultValue, + scala.Option<<a href="../../../../org/apache/spark/graphx/PartitionStrategy.html" title="interface in org.apache.spark.graphx">PartitionStrategy</a>> uniqueEdges, + <a href="../../../../org/apache/spark/storage/StorageLevel.html" title="class in org.apache.spark.storage">StorageLevel</a> edgeStorageLevel, + <a href="../../../../org/apache/spark/storage/StorageLevel.html" title="class in org.apache.spark.storage">StorageLevel</a> vertexStorageLevel, + scala.reflect.ClassTag<VD> evidence$16)</pre> +<div class="block">Construct a graph from a collection of edges encoded as vertex id pairs. + <p></div> +<dl><dt><span class="strong">Parameters:</span></dt><dd><code>rawEdges</code> - a collection of edges in (src, dst) form</dd><dd><code>defaultValue</code> - the vertex attributes with which to create vertices referenced by the edges</dd><dd><code>uniqueEdges</code> - if multiple identical edges are found they are combined and the edge + attribute is set to the sum. Otherwise duplicate edges are treated as separate. 
To enable + <code>uniqueEdges</code>, a <a href="../../../../org/apache/spark/graphx/PartitionStrategy.html" title="interface in org.apache.spark.graphx"><code>PartitionStrategy</code></a> must be provided.</dd><dd><code>edgeStorageLevel</code> - the desired storage level at which to cache the edges if necessary</dd><dd><code>vertexStorageLevel</code> - the desired storage level at which to cache the vertices if necessary + <p></dd><dd><code>evidence$16</code> - (undocumented)</dd> +<dt><span class="strong">Returns:</span></dt><dd>a graph with edge attributes containing either the count of duplicate edges or 1 + (if <code>uniqueEdges</code> is <code>None</code>) and vertex attributes containing the total degree of each vertex.</dd></dl> +</li> +</ul> +<a name="fromEdges(org.apache.spark.rdd.RDD,java.lang.Object,org.apache.spark.storage.StorageLevel,org.apache.spark.storage.StorageLevel,scala.reflect.ClassTag,scala.reflect.ClassTag)"> +<!-- --> +</a><a name="fromEdges(org.apache.spark.rdd.RDD, VD, org.apache.spark.storage.StorageLevel, org.apache.spark.storage.StorageLevel, scala.reflect.ClassTag, scala.reflect.ClassTag)"> +<!-- --> +</a> +<ul class="blockList"> +<li class="blockList"> +<h4>fromEdges</h4> +<pre>public static <VD,ED> <a href="../../../../org/apache/spark/graphx/Graph.html" title="class in org.apache.spark.graphx">Graph</a><VD,ED> fromEdges(<a href="../../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a><<a href="../../../../org/apache/spark/graphx/Edge.html" title="class in org.apache.spark.graphx">Edge</a><ED>> edges, + VD defaultValue, + <a href="../../../../org/apache/spark/storage/StorageLevel.html" title="class in org.apache.spark.storage">StorageLevel</a> edgeStorageLevel, + <a href="../../../../org/apache/spark/storage/StorageLevel.html" title="class in org.apache.spark.storage">StorageLevel</a> vertexStorageLevel, + scala.reflect.ClassTag<VD> evidence$17, + scala.reflect.ClassTag<ED> evidence$18)</pre> +<div 
class="block">Construct a graph from a collection of edges. + <p></div> +<dl><dt><span class="strong">Parameters:</span></dt><dd><code>edges</code> - the RDD containing the set of edges in the graph</dd><dd><code>defaultValue</code> - the default vertex attribute to use for each vertex</dd><dd><code>edgeStorageLevel</code> - the desired storage level at which to cache the edges if necessary</dd><dd><code>vertexStorageLevel</code> - the desired storage level at which to cache the vertices if necessary + <p></dd><dd><code>evidence$17</code> - (undocumented)</dd><dd><code>evidence$18</code> - (undocumented)</dd> +<dt><span class="strong">Returns:</span></dt><dd>a graph with edge attributes described by <code>edges</code> and vertices + given by all vertices in <code>edges</code> with value <code>defaultValue</code></dd></dl> +</li> +</ul> +<a name="apply(org.apache.spark.rdd.RDD,org.apache.spark.rdd.RDD,java.lang.Object,org.apache.spark.storage.StorageLevel,org.apache.spark.storage.StorageLevel,scala.reflect.ClassTag,scala.reflect.ClassTag)"> +<!-- --> +</a><a name="apply(org.apache.spark.rdd.RDD, org.apache.spark.rdd.RDD, VD, org.apache.spark.storage.StorageLevel, org.apache.spark.storage.StorageLevel, scala.reflect.ClassTag, scala.reflect.ClassTag)"> +<!-- --> +</a> +<ul class="blockList"> +<li class="blockList"> +<h4>apply</h4> +<pre>public static <VD,ED> <a href="../../../../org/apache/spark/graphx/Graph.html" title="class in org.apache.spark.graphx">Graph</a><VD,ED> apply(<a href="../../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a><scala.Tuple2<Object,VD>> vertices, + <a href="../../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a><<a href="../../../../org/apache/spark/graphx/Edge.html" title="class in org.apache.spark.graphx">Edge</a><ED>> edges, + VD defaultVertexAttr, + <a href="../../../../org/apache/spark/storage/StorageLevel.html" title="class in org.apache.spark.storage">StorageLevel</a> 
edgeStorageLevel, + <a href="../../../../org/apache/spark/storage/StorageLevel.html" title="class in org.apache.spark.storage">StorageLevel</a> vertexStorageLevel, + scala.reflect.ClassTag<VD> evidence$19, + scala.reflect.ClassTag<ED> evidence$20)</pre> +<div class="block">Construct a graph from a collection of vertices and + edges with attributes. Duplicate vertices are picked arbitrarily and + vertices found in the edge collection but not in the input + vertices are assigned the default attribute. + <p></div> +<dl><dt><span class="strong">Parameters:</span></dt><dd><code>vertices</code> - the "set" of vertices and their attributes</dd><dd><code>edges</code> - the collection of edges in the graph</dd><dd><code>defaultVertexAttr</code> - the default vertex attribute to use for vertices that are + mentioned in edges but not in vertices</dd><dd><code>edgeStorageLevel</code> - the desired storage level at which to cache the edges if necessary</dd><dd><code>vertexStorageLevel</code> - the desired storage level at which to cache the vertices if necessary</dd><dd><code>evidence$19</code> - (undocumented)</dd><dd><code>evidence$20</code> - (undocumented)</dd> +<dt><span class="strong">Returns:</span></dt><dd>(undocumented)</dd></dl> +</li> +</ul> +<a name="graphToGraphOps(org.apache.spark.graphx.Graph, scala.reflect.ClassTag, scala.reflect.ClassTag)"> +<!-- --> +</a> +<ul class="blockList"> +<li class="blockList"> +<h4>graphToGraphOps</h4> +<pre>public static <VD,ED> <a href="../../../../org/apache/spark/graphx/GraphOps.html" title="class in org.apache.spark.graphx">GraphOps</a><VD,ED> graphToGraphOps(<a href="../../../../org/apache/spark/graphx/Graph.html" title="class in org.apache.spark.graphx">Graph</a><VD,ED> g, + scala.reflect.ClassTag<VD> evidence$21, + scala.reflect.ClassTag<ED> evidence$22)</pre> +<div class="block">Implicitly extracts the <a href="../../../../org/apache/spark/graphx/GraphOps.html" title="class in 
org.apache.spark.graphx"><code>GraphOps</code></a> member from a graph. + <p> + To improve modularity the Graph type only contains a small set of basic operations. + All the convenience operations are defined in the <a href="../../../../org/apache/spark/graphx/GraphOps.html" title="class in org.apache.spark.graphx"><code>GraphOps</code></a> class which may be + shared across multiple graph implementations.</div> +<dl><dt><span class="strong">Parameters:</span></dt><dd><code>g</code> - (undocumented)</dd><dd><code>evidence$21</code> - (undocumented)</dd><dd><code>evidence$22</code> - (undocumented)</dd> +<dt><span class="strong">Returns:</span></dt><dd>(undocumented)</dd></dl> +</li> +</ul> +<a name="vertices()"> +<!-- --> +</a> +<ul class="blockList"> +<li class="blockList"> +<h4>vertices</h4> +<pre>public abstract <a href="../../../../org/apache/spark/graphx/VertexRDD.html" title="class in org.apache.spark.graphx">VertexRDD</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>> vertices()</pre> +<div class="block">An RDD containing the vertices and their associated attributes. + <p></div> +<dl><dt><span class="strong">Returns:</span></dt><dd>an RDD containing the vertices in this graph</dd></dl> +</li> +</ul> +<a name="edges()"> +<!-- --> +</a> +<ul class="blockList"> +<li class="blockList"> +<h4>edges</h4> +<pre>public abstract <a href="../../../../org/apache/spark/graphx/EdgeRDD.html" title="class in org.apache.spark.graphx">EdgeRDD</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>> edges()</pre> +<div class="block">An RDD containing the edges and their associated attributes. The entries in the RDD contain + just the source id and target id along with the edge data. 
+ <p></div> +<dl><dt><span class="strong">Returns:</span></dt><dd>an RDD containing the edges in this graph + <p></dd><dt><span class="strong">See Also:</span></dt><dd><code>Edge</code> for the edge type., +<code>Graph#triplets</code> to get an RDD which contains all the edges + along with their vertex data. + <p></dd></dl> +</li> +</ul> +<a name="triplets()"> +<!-- --> +</a> +<ul class="blockList"> +<li class="blockList"> +<h4>triplets</h4> +<pre>public abstract <a href="../../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a><<a href="../../../../org/apache/spark/graphx/EdgeTriplet.html" title="class in org.apache.spark.graphx">EdgeTriplet</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>>> triplets()</pre> +<div class="block">An RDD containing the edge triplets, which are edges along with the vertex data associated with + the adjacent vertices. The caller should use <code>edges</code> if the vertex data are not needed, i.e. + if only the edge data and adjacent vertex ids are needed. 
+ <p></div> +<dl><dt><span class="strong">Returns:</span></dt><dd>an RDD containing edge triplets + <p></dd></dl> +</li> +</ul> +<a name="persist(org.apache.spark.storage.StorageLevel)"> +<!-- --> +</a> +<ul class="blockList"> +<li class="blockList"> +<h4>persist</h4> +<pre>public abstract <a href="../../../../org/apache/spark/graphx/Graph.html" title="class in org.apache.spark.graphx">Graph</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>> persist(<a href="../../../../org/apache/spark/storage/StorageLevel.html" title="class in org.apache.spark.storage">StorageLevel</a> newLevel)</pre> +<div class="block">Caches the vertices and edges associated with this graph at the specified storage level, + ignoring any target storage levels previously set. + <p></div> +<dl><dt><span class="strong">Parameters:</span></dt><dd><code>newLevel</code> - the level at which to cache the graph. + <p></dd> +<dt><span class="strong">Returns:</span></dt><dd>A reference to this graph for convenience.</dd></dl> +</li> +</ul> +<a name="cache()"> +<!-- --> +</a> +<ul class="blockList"> +<li class="blockList"> +<h4>cache</h4> +<pre>public abstract <a href="../../../../org/apache/spark/graphx/Graph.html" title="class in org.apache.spark.graphx">Graph</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>> cache()</pre> +<div class="block">Caches the vertices and edges associated with this graph at the previously-specified target + storage levels, which default to <code>MEMORY_ONLY</code>. 
This is used to pin a graph in memory enabling + multiple queries to reuse the same construction process.</div> +<dl><dt><span class="strong">Returns:</span></dt><dd>(undocumented)</dd></dl> +</li> +</ul> +<a name="checkpoint()"> +<!-- --> +</a> +<ul class="blockList"> +<li class="blockList"> +<h4>checkpoint</h4> +<pre>public abstract void checkpoint()</pre> +<div class="block">Mark this Graph for checkpointing. It will be saved to a file inside the checkpoint + directory set with SparkContext.setCheckpointDir() and all references to its parent + RDDs will be removed. It is strongly recommended that this Graph is persisted in + memory, otherwise saving it on a file will require recomputation.</div> +</li> +</ul> +<a name="isCheckpointed()"> +<!-- --> +</a> +<ul class="blockList"> +<li class="blockList"> +<h4>isCheckpointed</h4> +<pre>public abstract boolean isCheckpointed()</pre> +<div class="block">Return whether this Graph has been checkpointed or not. + This returns true iff both the vertices RDD and edges RDD have been checkpointed.</div> +<dl><dt><span class="strong">Returns:</span></dt><dd>(undocumented)</dd></dl> +</li> +</ul> +<a name="getCheckpointFiles()"> +<!-- --> +</a> +<ul class="blockList"> +<li class="blockList"> +<h4>getCheckpointFiles</h4> +<pre>public abstract scala.collection.Seq<String> getCheckpointFiles()</pre> +<div class="block">Gets the name of the files to which this Graph was checkpointed. 
+ (The vertices RDD and edges RDD are checkpointed separately.)</div> +<dl><dt><span class="strong">Returns:</span></dt><dd>(undocumented)</dd></dl> +</li> +</ul> +<a name="unpersist(boolean)"> +<!-- --> +</a> +<ul class="blockList"> +<li class="blockList"> +<h4>unpersist</h4> +<pre>public abstract <a href="../../../../org/apache/spark/graphx/Graph.html" title="class in org.apache.spark.graphx">Graph</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>> unpersist(boolean blocking)</pre> +<div class="block">Uncaches both vertices and edges of this graph. This is useful in iterative algorithms that + build a new graph in each iteration.</div> +<dl><dt><span class="strong">Parameters:</span></dt><dd><code>blocking</code> - (undocumented)</dd> +<dt><span class="strong">Returns:</span></dt><dd>(undocumented)</dd></dl> +</li> +</ul> +<a name="unpersistVertices(boolean)"> +<!-- --> +</a> +<ul class="blockList"> +<li class="blockList"> +<h4>unpersistVertices</h4> +<pre>public abstract <a href="../../../../org/apache/spark/graphx/Graph.html" title="class in org.apache.spark.graphx">Graph</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>> unpersistVertices(boolean blocking)</pre> +<div class="block">Uncaches only the vertices of this graph, leaving the edges alone. This is useful in iterative + algorithms that modify the vertex attributes but reuse the edges. 
This method can be used to + uncache the vertex attributes of previous iterations once they are no longer needed, improving + GC performance.</div> +<dl><dt><span class="strong">Parameters:</span></dt><dd><code>blocking</code> - (undocumented)</dd> +<dt><span class="strong">Returns:</span></dt><dd>(undocumented)</dd></dl> +</li> +</ul> +<a name="partitionBy(org.apache.spark.graphx.PartitionStrategy)"> +<!-- --> +</a> +<ul class="blockList"> +<li class="blockList"> +<h4>partitionBy</h4> +<pre>public abstract <a href="../../../../org/apache/spark/graphx/Graph.html" title="class in org.apache.spark.graphx">Graph</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>> partitionBy(<a href="../../../../org/apache/spark/graphx/PartitionStrategy.html" title="interface in org.apache.spark.graphx">PartitionStrategy</a> partitionStrategy)</pre> +<div class="block">Repartitions the edges in the graph according to <code>partitionStrategy</code>. 
+ <p></div> +<dl><dt><span class="strong">Parameters:</span></dt><dd><code>partitionStrategy</code> - the partitioning strategy to use when partitioning the edges + in the graph.</dd> +<dt><span class="strong">Returns:</span></dt><dd>(undocumented)</dd></dl> +</li> +</ul> +<a name="partitionBy(org.apache.spark.graphx.PartitionStrategy, int)"> +<!-- --> +</a> +<ul class="blockList"> +<li class="blockList"> +<h4>partitionBy</h4> +<pre>public abstract <a href="../../../../org/apache/spark/graphx/Graph.html" title="class in org.apache.spark.graphx">Graph</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>> partitionBy(<a href="../../../../org/apache/spark/graphx/PartitionStrategy.html" title="interface in org.apache.spark.graphx">PartitionStrategy</a> partitionStrategy, + int numPartitions)</pre> +<div class="block">Repartitions the edges in the graph according to <code>partitionStrategy</code>. 
+ <p></div> +<dl><dt><span class="strong">Parameters:</span></dt><dd><code>partitionStrategy</code> - the partitioning strategy to use when partitioning the edges + in the graph.</dd><dd><code>numPartitions</code> - the number of edge partitions in the new graph.</dd> +<dt><span class="strong">Returns:</span></dt><dd>(undocumented)</dd></dl> +</li> +</ul> +<a name="mapVertices(scala.Function2, scala.reflect.ClassTag, scala.Predef.$eq$colon$eq)"> +<!-- --> +</a> +<ul class="blockList"> +<li class="blockList"> +<h4>mapVertices</h4> +<pre>public abstract <VD2> <a href="../../../../org/apache/spark/graphx/Graph.html" title="class in org.apache.spark.graphx">Graph</a><VD2,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>> mapVertices(scala.Function2<Object,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,VD2> map, + scala.reflect.ClassTag<VD2> evidence$3, + scala.Predef.$eq$colon$eq<<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,VD2> eq)</pre> +<div class="block">Transforms each vertex attribute in the graph using the map function. 
+ <p></div> +<dl><dt><span class="strong">Parameters:</span></dt><dd><code>map</code> - the function from a vertex object to a new vertex value + <p></dd><dd><code>evidence$3</code> - (undocumented)</dd><dd><code>eq</code> - (undocumented)</dd> +<dt><span class="strong">Returns:</span></dt><dd>(undocumented)</dd></dl> +</li> +</ul> +<a name="mapEdges(scala.Function1, scala.reflect.ClassTag)"> +<!-- --> +</a> +<ul class="blockList"> +<li class="blockList"> +<h4>mapEdges</h4> +<pre>public <ED2> <a href="../../../../org/apache/spark/graphx/Graph.html" title="class in org.apache.spark.graphx">Graph</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,ED2> mapEdges(scala.Function1<<a href="../../../../org/apache/spark/graphx/Edge.html" title="class in org.apache.spark.graphx">Edge</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>>,ED2> map, + scala.reflect.ClassTag<ED2> evidence$4)</pre> +<div class="block">Transforms each edge attribute in the graph using the map function. The map function is not + passed the vertex value for the vertices adjacent to the edge. If vertex values are desired, + use <code>mapTriplets</code>. + <p></div> +<dl><dt><span class="strong">Parameters:</span></dt><dd><code>map</code> - the function from an edge object to a new edge value. 
+ <p></dd><dd><code>evidence$4</code> - (undocumented)</dd> +<dt><span class="strong">Returns:</span></dt><dd>(undocumented)</dd></dl> +</li> +</ul> +<a name="mapEdges(scala.Function2, scala.reflect.ClassTag)"> +<!-- --> +</a> +<ul class="blockList"> +<li class="blockList"> +<h4>mapEdges</h4> +<pre>public abstract <ED2> <a href="../../../../org/apache/spark/graphx/Graph.html" title="class in org.apache.spark.graphx">Graph</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,ED2> mapEdges(scala.Function2<Object,scala.collection.Iterator<<a href="../../../../org/apache/spark/graphx/Edge.html" title="class in org.apache.spark.graphx">Edge</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>>>,scala.collection.Iterator<ED2>> map, + scala.reflect.ClassTag<ED2> evidence$5)</pre> +<div class="block">Transforms each edge attribute using the map function, passing it a whole partition at a + time. The map function is given an iterator over edges within a logical partition as well as + the partition's ID, and it should return a new iterator over the new values of each edge. The + new iterator's elements must correspond one-to-one with the old iterator's elements. If + adjacent vertex values are desired, use <code>mapTriplets</code>. 
+ <p></div> +<dl><dt><span class="strong">Parameters:</span></dt><dd><code>map</code> - a function that takes a partition id and an iterator + over all the edges in the partition, and must return an iterator over + the new values for each edge in the order of the input iterator + <p></dd><dd><code>evidence$5</code> - (undocumented)</dd> +<dt><span class="strong">Returns:</span></dt><dd>(undocumented)</dd></dl> +</li> +</ul> +<a name="mapTriplets(scala.Function1, scala.reflect.ClassTag)"> +<!-- --> +</a> +<ul class="blockList"> +<li class="blockList"> +<h4>mapTriplets</h4> +<pre>public <ED2> <a href="../../../../org/apache/spark/graphx/Graph.html" title="class in org.apache.spark.graphx">Graph</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,ED2> mapTriplets(scala.Function1<<a href="../../../../org/apache/spark/graphx/EdgeTriplet.html" title="class in org.apache.spark.graphx">EdgeTriplet</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>>,ED2> map, + scala.reflect.ClassTag<ED2> evidence$6)</pre> +<div class="block">Transforms each edge attribute using the map function, passing it the adjacent vertex + attributes as well. If adjacent vertex values are not required, + consider using <code>mapEdges</code> instead. + <p></div> +<dl><dt><span class="strong">Parameters:</span></dt><dd><code>map</code> - the function from an edge object to a new edge value. 
+ <p></dd><dd><code>evidence$6</code> - (undocumented)</dd> +<dt><span class="strong">Returns:</span></dt><dd>(undocumented)</dd></dl> +</li> +</ul> +<a name="mapTriplets(scala.Function1, org.apache.spark.graphx.TripletFields, scala.reflect.ClassTag)"> +<!-- --> +</a> +<ul class="blockList"> +<li class="blockList"> +<h4>mapTriplets</h4> +<pre>public <ED2> <a href="../../../../org/apache/spark/graphx/Graph.html" title="class in org.apache.spark.graphx">Graph</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,ED2> mapTriplets(scala.Function1<<a href="../../../../org/apache/spark/graphx/EdgeTriplet.html" title="class in org.apache.spark.graphx">EdgeTriplet</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>>,ED2> map, + <a href="../../../../org/apache/spark/graphx/TripletFields.html" title="class in org.apache.spark.graphx">TripletFields</a> tripletFields, + scala.reflect.ClassTag<ED2> evidence$7)</pre> +<div class="block">Transforms each edge attribute using the map function, passing it the adjacent vertex + attributes as well. If adjacent vertex values are not required, + consider using <code>mapEdges</code> instead. + <p></div> +<dl><dt><span class="strong">Parameters:</span></dt><dd><code>map</code> - the function from an edge object to a new edge value.</dd><dd><code>tripletFields</code> - which fields should be included in the edge triplet passed to the map + function. If not all fields are needed, specifying this can improve performance. 
+ <p></dd><dd><code>evidence$7</code> - (undocumented)</dd> +<dt><span class="strong">Returns:</span></dt><dd>(undocumented)</dd></dl> +</li> +</ul> +<a name="mapTriplets(scala.Function2, org.apache.spark.graphx.TripletFields, scala.reflect.ClassTag)"> +<!-- --> +</a> +<ul class="blockList"> +<li class="blockList"> +<h4>mapTriplets</h4> +<pre>public abstract <ED2> <a href="../../../../org/apache/spark/graphx/Graph.html" title="class in org.apache.spark.graphx">Graph</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,ED2> mapTriplets(scala.Function2<Object,scala.collection.Iterator<<a href="../../../../org/apache/spark/graphx/EdgeTriplet.html" title="class in org.apache.spark.graphx">EdgeTriplet</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>>>,scala.collection.Iterator<ED2>> map, + <a href="../../../../org/apache/spark/graphx/TripletFields.html" title="class in org.apache.spark.graphx">TripletFields</a> tripletFields, + scala.reflect.ClassTag<ED2> evidence$8)</pre> +<div class="block">Transforms each edge attribute a partition at a time using the map function, passing it the + adjacent vertex attributes as well. The map function is given an iterator over edge triplets + within a logical partition and should yield a new iterator over the new values of each edge in + the order in which they are provided. If adjacent vertex values are not required, consider + using <code>mapEdges</code> instead. + <p></div> +<dl><dt><span class="strong">Parameters:</span></dt><dd><code>map</code> - the iterator transform</dd><dd><code>tripletFields</code> - which fields should be included in the edge triplet passed to the map + function. If not all fields are needed, specifying this can improve performance. 
+ <p></dd><dd><code>evidence$8</code> - (undocumented)</dd> +<dt><span class="strong">Returns:</span></dt><dd>(undocumented)</dd></dl> +</li> +</ul> +<a name="reverse()"> +<!-- --> +</a> +<ul class="blockList"> +<li class="blockList"> +<h4>reverse</h4> +<pre>public abstract <a href="../../../../org/apache/spark/graphx/Graph.html" title="class in org.apache.spark.graphx">Graph</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>> reverse()</pre> +<div class="block">Reverses all edges in the graph. If this graph contains an edge from a to b then the returned + graph contains an edge from b to a.</div> +<dl><dt><span class="strong">Returns:</span></dt><dd>(undocumented)</dd></dl> +</li> +</ul> +<a name="subgraph(scala.Function1, scala.Function2)"> +<!-- --> +</a> +<ul class="blockList"> +<li class="blockList"> +<h4>subgraph</h4> +<pre>public abstract <a href="../../../../org/apache/spark/graphx/Graph.html" title="class in org.apache.spark.graphx">Graph</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>> subgraph(scala.Function1<<a href="../../../../org/apache/spark/graphx/EdgeTriplet.html" title="class in org.apache.spark.graphx">EdgeTriplet</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>>,Object> epred, + scala.Function2<Object,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,Object> vpred)</pre> +<div class="block">Restricts the graph to only the vertices and edges satisfying the predicates. 
The resulting + subgraph satisfies + <p> + <pre><code> + V' = {v : for all v in V where vpred(v)} + E' = {(u,v): for all (u,v) in E where epred((u,v)) && vpred(u) && vpred(v)} + </code></pre> + <p></div> +<dl><dt><span class="strong">Parameters:</span></dt><dd><code>epred</code> - the edge predicate, which takes a triplet and + evaluates to true if the edge is to remain in the subgraph. Note + that only edges where both vertices satisfy the vertex + predicate are considered. + <p></dd><dd><code>vpred</code> - the vertex predicate, which takes a vertex object and + evaluates to true if the vertex is to be included in the subgraph + <p></dd> +<dt><span class="strong">Returns:</span></dt><dd>the subgraph containing only the vertices and edges that + satisfy the predicates</dd></dl> +</li> +</ul> +<a name="mask(org.apache.spark.graphx.Graph, scala.reflect.ClassTag, scala.reflect.ClassTag)"> +<!-- --> +</a> +<ul class="blockList"> +<li class="blockList"> +<h4>mask</h4> +<pre>public abstract <VD2,ED2> <a href="../../../../org/apache/spark/graphx/Graph.html" title="class in org.apache.spark.graphx">Graph</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>> mask(<a href="../../../../org/apache/spark/graphx/Graph.html" title="class in org.apache.spark.graphx">Graph</a><VD2,ED2> other, + scala.reflect.ClassTag<VD2> evidence$9, + scala.reflect.ClassTag<ED2> evidence$10)</pre> +<div class="block">Restricts the graph to only the vertices and edges that are also in <code>other</code>, but keeps the + attributes from this graph.</div> +<dl><dt><span class="strong">Parameters:</span></dt><dd><code>other</code> - the graph to project this graph onto</dd><dd><code>evidence$9</code> - (undocumented)</dd><dd><code>evidence$10</code> - (undocumented)</dd> +<dt><span class="strong">Returns:</span></dt><dd>a graph with vertices and edges that 
exist in both the current graph and <code>other</code>, + with vertex and edge data from the current graph</dd></dl> +</li> +</ul> +<a name="groupEdges(scala.Function2)"> +<!-- --> +</a> +<ul class="blockList"> +<li class="blockList"> +<h4>groupEdges</h4> +<pre>public abstract <a href="../../../../org/apache/spark/graphx/Graph.html" title="class in org.apache.spark.graphx">Graph</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>> groupEdges(scala.Function2<<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>> merge)</pre> +<div class="block">Merges multiple edges between two vertices into a single edge. For correct results, the graph + must have been partitioned using <code>partitionBy</code>. + <p></div> +<dl><dt><span class="strong">Parameters:</span></dt><dd><code>merge</code> - the user-supplied commutative associative function to merge edge attributes + for duplicate edges. 
+ <p></dd> +<dt><span class="strong">Returns:</span></dt><dd>The resulting graph with a single edge for each (source, dest) vertex pair.</dd></dl> +</li> +</ul> +<a name="mapReduceTriplets(scala.Function1, scala.Function2, scala.Option, scala.reflect.ClassTag)"> +<!-- --> +</a> +<ul class="blockList"> +<li class="blockList"> +<h4>mapReduceTriplets</h4> +<pre>public abstract <A> <a href="../../../../org/apache/spark/graphx/VertexRDD.html" title="class in org.apache.spark.graphx">VertexRDD</a><A> mapReduceTriplets(scala.Function1<<a href="../../../../org/apache/spark/graphx/EdgeTriplet.html" title="class in org.apache.spark.graphx">EdgeTriplet</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>>,scala.collection.Iterator<scala.Tuple2<Object,A>>> mapFunc, + scala.Function2<A,A,A> reduceFunc, + scala.Option<scala.Tuple2<<a href="../../../../org/apache/spark/graphx/VertexRDD.html" title="class in org.apache.spark.graphx">VertexRDD</a><?>,<a href="../../../../org/apache/spark/graphx/EdgeDirection.html" title="class in org.apache.spark.graphx">EdgeDirection</a>>> activeSetOpt, + scala.reflect.ClassTag<A> evidence$11)</pre> +<div class="block">Aggregates values from the neighboring edges and vertices of each vertex. The user supplied + <code>mapFunc</code> function is invoked on each edge of the graph, generating 0 or more "messages" to be + "sent" to either vertex in the edge. The <code>reduceFunc</code> is then used to combine the output of + the map phase destined to each vertex. + <p> + This function is deprecated in 1.2.0 because of SPARK-3936. Use aggregateMessages instead. 
+ <p></div> +<dl><dt><span class="strong">Parameters:</span></dt><dd><code>mapFunc</code> - the user defined map function which returns 0 or + more messages to neighboring vertices + <p></dd><dd><code>reduceFunc</code> - the user defined reduce function which should + be commutative and associative and is used to combine the output + of the map phase + <p></dd><dd><code>activeSetOpt</code> - an efficient way to run the aggregation on a subset of the edges if + desired. This is done by specifying a set of "active" vertices and an edge direction. The + <code>sendMsg</code> function will then run only on edges connected to active vertices by edges in the + specified direction. If the direction is <code>In</code>, <code>sendMsg</code> will only be run on edges with + destination in the active set. If the direction is <code>Out</code>, <code>sendMsg</code> will only be run on edges + originating from vertices in the active set. If the direction is <code>Either</code>, <code>sendMsg</code> will be + run on edges with *either* vertex in the active set. If the direction is <code>Both</code>, <code>sendMsg</code> + will be run on edges with *both* vertices in the active set. The active set must have the + same index as the graph's vertices. 
+ <p></dd><dd><code>evidence$11</code> - (undocumented)</dd> +<dt><span class="strong">Returns:</span></dt><dd>(undocumented)</dd></dl> +</li> +</ul> +<a name="aggregateMessages(scala.Function1, scala.Function2, org.apache.spark.graphx.TripletFields, scala.reflect.ClassTag)"> +<!-- --> +</a> +<ul class="blockList"> +<li class="blockList"> +<h4>aggregateMessages</h4> +<pre>public <A> <a href="../../../../org/apache/spark/graphx/VertexRDD.html" title="class in org.apache.spark.graphx">VertexRDD</a><A> aggregateMessages(scala.Function1<<a href="../../../../org/apache/spark/graphx/EdgeContext.html" title="class in org.apache.spark.graphx">EdgeContext</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>,A>,scala.runtime.BoxedUnit> sendMsg, + scala.Function2<A,A,A> mergeMsg, + <a href="../../../../org/apache/spark/graphx/TripletFields.html" title="class in org.apache.spark.graphx">TripletFields</a> tripletFields, + scala.reflect.ClassTag<A> evidence$12)</pre> +<div class="block">Aggregates values from the neighboring edges and vertices of each vertex. The user-supplied + <code>sendMsg</code> function is invoked on each edge of the graph, generating 0 or more messages to be + sent to either vertex in the edge. The <code>mergeMsg</code> function is then used to combine all messages + destined to the same vertex. + <p></div> +<dl><dt><span class="strong">Parameters:</span></dt><dd><code>sendMsg</code> - runs on each edge, sending messages to neighboring vertices using the + <a href="../../../../org/apache/spark/graphx/EdgeContext.html" title="class in org.apache.spark.graphx"><code>EdgeContext</code></a>.</dd><dd><code>mergeMsg</code> - used to combine messages from <code>sendMsg</code> destined to the same vertex. 
This + combiner should be commutative and associative.</dd><dd><code>tripletFields</code> - which fields should be included in the <a href="../../../../org/apache/spark/graphx/EdgeContext.html" title="class in org.apache.spark.graphx"><code>EdgeContext</code></a> passed to the + <code>sendMsg</code> function. If not all fields are needed, specifying this can improve performance. + <p></dd><dd><code>evidence$12</code> - (undocumented)</dd> +<dt><span class="strong">Returns:</span></dt><dd>(undocumented)</dd></dl> +</li> +</ul> +<a name="outerJoinVertices(org.apache.spark.rdd.RDD, scala.Function3, scala.reflect.ClassTag, scala.reflect.ClassTag, scala.Predef.$eq$colon$eq)"> +<!-- --> +</a> +<ul class="blockList"> +<li class="blockList"> +<h4>outerJoinVertices</h4> +<pre>public abstract <U,VD2> <a href="../../../../org/apache/spark/graphx/Graph.html" title="class in org.apache.spark.graphx">Graph</a><VD2,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>> outerJoinVertices(<a href="../../../../org/apache/spark/rdd/RDD.html" title="class in org.apache.spark.rdd">RDD</a><scala.Tuple2<Object,U>> other, + scala.Function3<Object,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,scala.Option<U>,VD2> mapFunc, + scala.reflect.ClassTag<U> evidence$14, + scala.reflect.ClassTag<VD2> evidence$15, + scala.Predef.$eq$colon$eq<<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,VD2> eq)</pre> +<div class="block">Joins the vertices with entries in the <code>table</code> RDD and merges the results using <code>mapFunc</code>. + The input table should contain at most one entry for each vertex. If no entry in <code>other</code> is + provided for a particular vertex in the graph, the map function receives <code>None</code>. 
+ <p></div> +<dl><dt><span class="strong">Parameters:</span></dt><dd><code>other</code> - the table to join with the vertices in the graph. + The table should contain at most one entry for each vertex.</dd><dd><code>mapFunc</code> - the function used to compute the new vertex values. + The map function is invoked for all vertices, even those + that do not have a corresponding entry in the table. + <p></dd><dd><code>evidence$14</code> - (undocumented)</dd><dd><code>evidence$15</code> - (undocumented)</dd><dd><code>eq</code> - (undocumented)</dd> +<dt><span class="strong">Returns:</span></dt><dd>(undocumented)</dd></dl> +</li> +</ul> +<a name="ops()"> +<!-- --> +</a> +<ul class="blockListLast"> +<li class="blockList"> +<h4>ops</h4> +<pre>public <a href="../../../../org/apache/spark/graphx/GraphOps.html" title="class in org.apache.spark.graphx">GraphOps</a><<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">VD</a>,<a href="../../../../org/apache/spark/graphx/Graph.html" title="type parameter in Graph">ED</a>> ops()</pre> +<div class="block">The associated <a href="../../../../org/apache/spark/graphx/GraphOps.html" title="class in org.apache.spark.graphx"><code>GraphOps</code></a> object.</div> +<dl><dt><span class="strong">Returns:</span></dt><dd>(undocumented)</dd></dl> +</li> +</ul> +</li> +</ul> +</li> +</ul> +</div> +</div> +<!-- ========= END OF CLASS DATA ========= --> +<!-- ======= START OF BOTTOM NAVBAR ====== --> +<div class="bottomNav"><a name="navbar_bottom"> +<!-- --> +</a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow"> +<!-- --> +</a> +<ul class="navList" title="Navigation"> +<li><a href="../../../../overview-summary.html">Overview</a></li> +<li><a href="package-summary.html">Package</a></li> +<li class="navBarCell1Rev">Class</li> +<li><a href="package-tree.html">Tree</a></li> +<li><a href="../../../../deprecated-list.html">Deprecated</a></li> +<li><a 
href="../../../../index-all.html">Index</a></li> +<li><a href="../../../../help-doc.html">Help</a></li> +</ul> +</div> +<div class="subNav"> +<ul class="navList"> +<li><a href="../../../../org/apache/spark/graphx/EdgeTriplet.html" title="class in org.apache.spark.graphx"><span class="strong">Prev Class</span></a></li> +<li><a href="../../../../org/apache/spark/graphx/GraphKryoRegistrator.html" title="class in org.apache.spark.graphx"><span class="strong">Next Class</span></a></li> +</ul> +<ul class="navList"> +<li><a href="../../../../index.html?org/apache/spark/graphx/Graph.html" target="_top">Frames</a></li> +<li><a href="Graph.html" target="_top">No Frames</a></li> +</ul> +<ul class="navList" id="allclasses_navbar_bottom"> +<li><a href="../../../../allclasses-noframe.html">All Classes</a></li> +</ul> +<div> +<script type="text/javascript"><!-- + allClassesLink = document.getElementById("allclasses_navbar_bottom"); + if(window==top) { + allClassesLink.style.display = "block"; + } + else { + allClassesLink.style.display = "none"; + } + //--> +</script> +</div> +<div> +<ul class="subNavList"> +<li>Summary: </li> +<li>Nested | </li> +<li>Field | </li> +<li>Constr | </li> +<li><a href="#method_summary">Method</a></li> +</ul> +<ul class="subNavList"> +<li>Detail: </li> +<li>Field | </li> +<li>Constr | </li> +<li><a href="#method_detail">Method</a></li> +</ul> +</div> +<a name="skip-navbar_bottom"> +<!-- --> +</a></div> +<!-- ======== END OF BOTTOM NAVBAR ======= --> +<script defer="defer" type="text/javascript" src="../../../../lib/jquery.js"></script><script defer="defer" type="text/javascript" src="../../../../lib/api-javadocs.js"></script></body> +</html>
http://git-wip-us.apache.org/repos/asf/spark-website/blob/24d32b75/site/docs/1.6.3/api/java/org/apache/spark/graphx/GraphKryoRegistrator.html ---------------------------------------------------------------------- diff --git a/site/docs/1.6.3/api/java/org/apache/spark/graphx/GraphKryoRegistrator.html b/site/docs/1.6.3/api/java/org/apache/spark/graphx/GraphKryoRegistrator.html new file mode 100644 index 0000000..15e419f --- /dev/null +++ b/site/docs/1.6.3/api/java/org/apache/spark/graphx/GraphKryoRegistrator.html @@ -0,0 +1,264 @@ +<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> +<!-- NewPage --> +<html lang="en"> +<head> +<!-- Generated by javadoc (version 1.7.0_79) on Wed Nov 02 15:16:13 PDT 2016 --> +<title>GraphKryoRegistrator (Spark 1.6.3 JavaDoc)</title> +<meta name="date" content="2016-11-02"> +<link rel="stylesheet" type="text/css" href="../../../../stylesheet.css" title="Style"> +</head> +<body> +<script type="text/javascript"><!-- + if (location.href.indexOf('is-external=true') == -1) { + parent.document.title="GraphKryoRegistrator (Spark 1.6.3 JavaDoc)"; + } +//--> +</script> +<noscript> +<div>JavaScript is disabled on your browser.</div> +</noscript> +<!-- ========= START OF TOP NAVBAR ======= --> +<div class="topNav"><a name="navbar_top"> +<!-- --> +</a><a href="#skip-navbar_top" title="Skip navigation links"></a><a name="navbar_top_firstrow"> +<!-- --> +</a> +<ul class="navList" title="Navigation"> +<li><a href="../../../../overview-summary.html">Overview</a></li> +<li><a href="package-summary.html">Package</a></li> +<li class="navBarCell1Rev">Class</li> +<li><a href="package-tree.html">Tree</a></li> +<li><a href="../../../../deprecated-list.html">Deprecated</a></li> +<li><a href="../../../../index-all.html">Index</a></li> +<li><a href="../../../../help-doc.html">Help</a></li> +</ul> +</div> +<div class="subNav"> +<ul class="navList"> +<li><a href="../../../../org/apache/spark/graphx/Graph.html" 
title="class in org.apache.spark.graphx"><span class="strong">Prev Class</span></a></li> +<li><a href="../../../../org/apache/spark/graphx/GraphLoader.html" title="class in org.apache.spark.graphx"><span class="strong">Next Class</span></a></li> +</ul> +<ul class="navList"> +<li><a href="../../../../index.html?org/apache/spark/graphx/GraphKryoRegistrator.html" target="_top">Frames</a></li> +<li><a href="GraphKryoRegistrator.html" target="_top">No Frames</a></li> +</ul> +<ul class="navList" id="allclasses_navbar_top"> +<li><a href="../../../../allclasses-noframe.html">All Classes</a></li> +</ul> +<div> +<script type="text/javascript"><!-- + allClassesLink = document.getElementById("allclasses_navbar_top"); + if(window==top) { + allClassesLink.style.display = "block"; + } + else { + allClassesLink.style.display = "none"; + } + //--> +</script> +</div> +<div> +<ul class="subNavList"> +<li>Summary: </li> +<li>Nested | </li> +<li>Field | </li> +<li><a href="#constructor_summary">Constr</a> | </li> +<li><a href="#method_summary">Method</a></li> +</ul> +<ul class="subNavList"> +<li>Detail: </li> +<li>Field | </li> +<li><a href="#constructor_detail">Constr</a> | </li> +<li><a href="#method_detail">Method</a></li> +</ul> +</div> +<a name="skip-navbar_top"> +<!-- --> +</a></div> +<!-- ========= END OF TOP NAVBAR ========= --> +<!-- ======== START OF CLASS DATA ======== --> +<div class="header"> +<div class="subTitle">org.apache.spark.graphx</div> +<h2 title="Class GraphKryoRegistrator" class="title">Class GraphKryoRegistrator</h2> +</div> +<div class="contentContainer"> +<ul class="inheritance"> +<li>Object</li> +<li> +<ul class="inheritance"> +<li>org.apache.spark.graphx.GraphKryoRegistrator</li> +</ul> +</li> +</ul> +<div class="description"> +<ul class="blockList"> +<li class="blockList"> +<dl> +<dt>All Implemented Interfaces:</dt> +<dd><a href="../../../../org/apache/spark/serializer/KryoRegistrator.html" title="interface in 
org.apache.spark.serializer">KryoRegistrator</a></dd> +</dl> +<hr> +<br> +<pre>public class <span class="strong">GraphKryoRegistrator</span> +extends Object +implements <a href="../../../../org/apache/spark/serializer/KryoRegistrator.html" title="interface in org.apache.spark.serializer">KryoRegistrator</a></pre> +<div class="block">Registers GraphX classes with Kryo for improved performance.</div> +</li> +</ul> +</div> +<div class="summary"> +<ul class="blockList"> +<li class="blockList"> +<!-- ======== CONSTRUCTOR SUMMARY ======== --> +<ul class="blockList"> +<li class="blockList"><a name="constructor_summary"> +<!-- --> +</a> +<h3>Constructor Summary</h3> +<table class="overviewSummary" border="0" cellpadding="3" cellspacing="0" summary="Constructor Summary table, listing constructors, and an explanation"> +<caption><span>Constructors</span><span class="tabEnd"> </span></caption> +<tr> +<th class="colOne" scope="col">Constructor and Description</th> +</tr> +<tr class="altColor"> +<td class="colOne"><code><strong><a href="../../../../org/apache/spark/graphx/GraphKryoRegistrator.html#GraphKryoRegistrator()">GraphKryoRegistrator</a></strong>()</code> </td> +</tr> +</table> +</li> +</ul> +<!-- ========== METHOD SUMMARY =========== --> +<ul class="blockList"> +<li class="blockList"><a name="method_summary"> +<!-- --> +</a> +<h3>Method Summary</h3> +<table class="overviewSummary" border="0" cellpadding="3" cellspacing="0" summary="Method Summary table, listing methods, and an explanation"> +<caption><span>Methods</span><span class="tabEnd"> </span></caption> +<tr> +<th class="colFirst" scope="col">Modifier and Type</th> +<th class="colLast" scope="col">Method and Description</th> +</tr> +<tr class="altColor"> +<td class="colFirst"><code>void</code></td> +<td class="colLast"><code><strong><a href="../../../../org/apache/spark/graphx/GraphKryoRegistrator.html#registerClasses(com.esotericsoftware.kryo.Kryo)">registerClasses</a></strong>(com.esotericsoftware.kryo.Kryo 
kryo)</code> </td> +</tr> +</table> +<ul class="blockList"> +<li class="blockList"><a name="methods_inherited_from_class_Object"> +<!-- --> +</a> +<h3>Methods inherited from class Object</h3> +<code>equals, getClass, hashCode, notify, notifyAll, toString, wait, wait, wait</code></li> +</ul> +</li> +</ul> +</li> +</ul> +</div> +<div class="details"> +<ul class="blockList"> +<li class="blockList"> +<!-- ========= CONSTRUCTOR DETAIL ======== --> +<ul class="blockList"> +<li class="blockList"><a name="constructor_detail"> +<!-- --> +</a> +<h3>Constructor Detail</h3> +<a name="GraphKryoRegistrator()"> +<!-- --> +</a> +<ul class="blockListLast"> +<li class="blockList"> +<h4>GraphKryoRegistrator</h4> +<pre>public GraphKryoRegistrator()</pre> +</li> +</ul> +</li> +</ul> +<!-- ============ METHOD DETAIL ========== --> +<ul class="blockList"> +<li class="blockList"><a name="method_detail"> +<!-- --> +</a> +<h3>Method Detail</h3> +<a name="registerClasses(com.esotericsoftware.kryo.Kryo)"> +<!-- --> +</a> +<ul class="blockListLast"> +<li class="blockList"> +<h4>registerClasses</h4> +<pre>public void registerClasses(com.esotericsoftware.kryo.Kryo kryo)</pre> +<dl> +<dt><strong>Specified by:</strong></dt> +<dd><code><a href="../../../../org/apache/spark/serializer/KryoRegistrator.html#registerClasses(com.esotericsoftware.kryo.Kryo)">registerClasses</a></code> in interface <code><a href="../../../../org/apache/spark/serializer/KryoRegistrator.html" title="interface in org.apache.spark.serializer">KryoRegistrator</a></code></dd> +</dl> +</li> +</ul> +</li> +</ul> +</li> +</ul> +</div> +</div> +<!-- ========= END OF CLASS DATA ========= --> +<!-- ======= START OF BOTTOM NAVBAR ====== --> +<div class="bottomNav"><a name="navbar_bottom"> +<!-- --> +</a><a href="#skip-navbar_bottom" title="Skip navigation links"></a><a name="navbar_bottom_firstrow"> +<!-- --> +</a> +<ul class="navList" title="Navigation"> +<li><a href="../../../../overview-summary.html">Overview</a></li> +<li><a 
href="package-summary.html">Package</a></li> +<li class="navBarCell1Rev">Class</li> +<li><a href="package-tree.html">Tree</a></li> +<li><a href="../../../../deprecated-list.html">Deprecated</a></li> +<li><a href="../../../../index-all.html">Index</a></li> +<li><a href="../../../../help-doc.html">Help</a></li> +</ul> +</div> +<div class="subNav"> +<ul class="navList"> +<li><a href="../../../../org/apache/spark/graphx/Graph.html" title="class in org.apache.spark.graphx"><span class="strong">Prev Class</span></a></li> +<li><a href="../../../../org/apache/spark/graphx/GraphLoader.html" title="class in org.apache.spark.graphx"><span class="strong">Next Class</span></a></li> +</ul> +<ul class="navList"> +<li><a href="../../../../index.html?org/apache/spark/graphx/GraphKryoRegistrator.html" target="_top">Frames</a></li> +<li><a href="GraphKryoRegistrator.html" target="_top">No Frames</a></li> +</ul> +<ul class="navList" id="allclasses_navbar_bottom"> +<li><a href="../../../../allclasses-noframe.html">All Classes</a></li> +</ul> +<div> +<script type="text/javascript"><!-- + allClassesLink = document.getElementById("allclasses_navbar_bottom"); + if(window==top) { + allClassesLink.style.display = "block"; + } + else { + allClassesLink.style.display = "none"; + } + //--> +</script> +</div> +<div> +<ul class="subNavList"> +<li>Summary: </li> +<li>Nested | </li> +<li>Field | </li> +<li><a href="#constructor_summary">Constr</a> | </li> +<li><a href="#method_summary">Method</a></li> +</ul> +<ul class="subNavList"> +<li>Detail: </li> +<li>Field | </li> +<li><a href="#constructor_detail">Constr</a> | </li> +<li><a href="#method_detail">Method</a></li> +</ul> +</div> +<a name="skip-navbar_bottom"> +<!-- --> +</a></div> +<!-- ======== END OF BOTTOM NAVBAR ======= --> +<script defer="defer" type="text/javascript" src="../../../../lib/jquery.js"></script><script defer="defer" type="text/javascript" src="../../../../lib/api-javadocs.js"></script></body> +</html> 
--------------------------------------------------------------------- To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org For additional commands, e-mail: commits-h...@spark.apache.org