[ https://issues.apache.org/jira/browse/FLINK-1520?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=14900368#comment-14900368 ]
ASF GitHub Bot commented on FLINK-1520:
---------------------------------------
Github user vasia commented on a diff in the pull request:
https://github.com/apache/flink/pull/1149#discussion_r39948692
--- Diff: flink-staging/flink-gelly/src/main/java/org/apache/flink/graph/GraphCsvReader.java ---
@@ -0,0 +1,486 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.graph;
+import com.google.common.base.Preconditions;
+
+import org.apache.flink.api.common.functions.MapFunction;
+import org.apache.flink.api.java.DataSet;
+import org.apache.flink.api.java.io.CsvReader;
+import org.apache.flink.api.java.tuple.Tuple2;
+import org.apache.flink.api.java.tuple.Tuple3;
+import org.apache.flink.core.fs.Path;
+import org.apache.flink.types.NullValue;
+import org.apache.flink.api.java.ExecutionEnvironment;
+
+/**
+ * A class to build a Graph from the path(s) to CSV file(s) containing optional vertex and edge data.
+ * The class also configures the CSV readers used to read the edge and vertex data, such as the field types,
+ * the delimiters (row and field), the fields that should be included or skipped, and other flags,
+ * such as whether to skip the initial line as the header.
+ * The configuration is done using the methods provided by the {@link org.apache.flink.api.java.io.CsvReader} class.
+ */
+
+public class GraphCsvReader {
+
+ @SuppressWarnings("unused")
+ private final Path vertexPath, edgePath;
+ private final ExecutionEnvironment executionContext;
+ protected CsvReader edgeReader;
+ protected CsvReader vertexReader;
+ protected MapFunction<?, ?> mapper;
+ protected Class<?> vertexKey;
+ protected Class<?> vertexValue;
+ protected Class<?> edgeValue;
+
+//--------------------------------------------------------------------------------------------------------------------
+ public GraphCsvReader(Path vertexPath, Path edgePath, ExecutionEnvironment context) {
+ this.vertexPath = vertexPath;
+ this.edgePath = edgePath;
+ this.vertexReader = new CsvReader(vertexPath, context);
+ this.edgeReader = new CsvReader(edgePath, context);
+ this.mapper = null;
+ this.executionContext = context;
+ }
+
+ public GraphCsvReader(Path edgePath, ExecutionEnvironment context) {
+ this.vertexPath = null;
+ this.edgePath = edgePath;
+ this.edgeReader = new CsvReader(edgePath, context);
+ this.vertexReader = null;
+ this.mapper = null;
+ this.executionContext = context;
+ }
+
+ public <K, VV> GraphCsvReader(Path edgePath, final MapFunction<K, VV> mapper, ExecutionEnvironment context) {
+ this.vertexPath = null;
+ this.edgePath = edgePath;
+ this.edgeReader = new CsvReader(edgePath, context);
+ this.vertexReader = null;
+ this.mapper = mapper;
+ this.executionContext = context;
+ }
+
+ public GraphCsvReader (String edgePath, ExecutionEnvironment context) {
+ this(new Path(Preconditions.checkNotNull(edgePath, "The file path may not be null.")), context);
+
+ }
+
+ public GraphCsvReader(String vertexPath, String edgePath, ExecutionEnvironment context) {
+ this(new Path(Preconditions.checkNotNull(vertexPath, "The file path may not be null.")),
+ new Path(Preconditions.checkNotNull(edgePath, "The file path may not be null.")), context);
+ }
+
+
+ public <K, VV> GraphCsvReader(String edgePath, final MapFunction<K, VV> mapper, ExecutionEnvironment context) {
+ this(new Path(Preconditions.checkNotNull(edgePath, "The file path may not be null.")), mapper, context);
+ }
+
+ /**
+ * Creates a Graph from CSV input with vertex values and edge values.
+ * The vertex values are specified through a vertices input file or a user-defined map function.
+ *
+ * @param vertexKey the type of the vertex IDs
+ * @param vertexValue the type of the vertex values
+ * @param edgeValue the type of the edge values
+ * @return a Graph with vertex and edge values.
+ */
+ @SuppressWarnings("unchecked")
+ public <K, VV, EV> Graph<K, VV, EV> types(Class<K> vertexKey, Class<VV> vertexValue,
+ Class<EV> edgeValue) {
+
+ DataSet<Tuple2<K, VV>> vertices = null;
+
+ if (edgeReader == null) {
+ throw new RuntimeException("The edges input file cannot be null!");
+ }
+
+ DataSet<Tuple3<K, K, EV>> edges = edgeReader.types(vertexKey, vertexKey, edgeValue);
+
+ // the vertex value can be provided by an input file or a user-defined mapper
+ if (vertexReader != null) {
+ vertices = vertexReader.types(vertexKey, vertexValue);
+ return Graph.fromTupleDataSet(vertices, edges, executionContext);
+ }
+ else if (mapper != null) {
+ return Graph.fromTupleDataSet(edges, (MapFunction<K, VV>) mapper, executionContext);
+ }
+ else {
+ throw new RuntimeException("Vertex values have to be specified through a vertices input file"
+ + " or a user-defined map function.");
+ }
+ }
+
+ /**
+ * Creates a Graph from CSV input with edge values, but without vertex values.
+ * @param vertexKey the type of the vertex IDs
+ * @param edgeValue the type of the edge values
+ * @return a Graph where the edges are read from an edges CSV file (with values).
+ */
+ public <K, EV> Graph<K, NullValue, EV> edgeTypes(Class<K> vertexKey, Class<EV> edgeValue) {
+
+ if (edgeReader == null) {
+ throw new RuntimeException("The edges input file cannot be null!");
+ }
+
+ DataSet<Tuple3<K, K, EV>> edges = edgeReader.types(vertexKey, vertexKey, edgeValue);
+
+ return Graph.fromTupleDataSet(edges, executionContext);
+ }
+
+ /**
+ * Creates a Graph from CSV input without vertex values or edge values.
+ * @param vertexKey the type of the vertex IDs
+ * @return a Graph where the vertex IDs are read from the edges input file.
+ */
+ public <K> Graph<K, NullValue, NullValue> keyType(Class<K> vertexKey) {
+
+ if (edgeReader == null) {
+ throw new RuntimeException("The edges input file cannot be null!");
+ }
+
+ @SuppressWarnings("serial")
+ DataSet<Tuple3<K, K, NullValue>> edges = edgeReader.types(vertexKey, vertexKey)
+ .map(new MapFunction<Tuple2<K, K>, Tuple3<K, K, NullValue>>() {
+
+ public Tuple3<K, K, NullValue> map(Tuple2<K, K> edge) {
+ return new Tuple3<K, K, NullValue>(edge.f0, edge.f1, NullValue.getInstance());
--- End diff ---
good catch! thnx!
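
For context, a rough usage sketch of the reader introduced in this diff: the constructors and the types()/edgeTypes()/keyType() calls are the ones shown in the excerpt above, while the file paths and the Long/String/Double field types are made up for illustration only.

import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.graph.Graph;
import org.apache.flink.graph.GraphCsvReader;
import org.apache.flink.types.NullValue;

public class GraphCsvReaderSketch {

	public static void main(String[] args) throws Exception {
		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

		// vertices.csv lines: "<vertexId>,<vertexValue>"; edges.csv lines: "<srcId>,<trgId>,<edgeValue>"
		// (the paths and the Long/String/Double types are placeholders)
		Graph<Long, String, Double> graph =
				new GraphCsvReader("/tmp/vertices.csv", "/tmp/edges.csv", env)
						.types(Long.class, String.class, Double.class);

		// edges-only input with edge values; vertex values default to NullValue
		Graph<Long, NullValue, Double> weighted =
				new GraphCsvReader("/tmp/edges.csv", env).edgeTypes(Long.class, Double.class);

		// edges-only input without any values
		Graph<Long, NullValue, NullValue> plain =
				new GraphCsvReader("/tmp/edges.csv", env).keyType(Long.class);

		// numberOfEdges() triggers execution of the plan that reads the edge file
		System.out.println("edges: " + graph.numberOfEdges());
	}
}

The three calls mirror the three result methods in the diff: full vertex and edge values, edge values only, and vertex IDs only.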
> Read edges and vertices from CSV files
> --------------------------------------
>
> Key: FLINK-1520
> URL: https://issues.apache.org/jira/browse/FLINK-1520
> Project: Flink
> Issue Type: New Feature
> Components: Gelly
> Reporter: Vasia Kalavri
> Assignee: Vasia Kalavri
> Priority: Minor
> Labels: easyfix, newbie
>
> Add methods to create Vertex and Edge Datasets directly from CSV file inputs.
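For comparison, this is roughly what assembling a Graph from CSV involves today without the proposed reader, using the existing CsvReader and Graph.fromTupleDataSet; the paths, the field types, and the ignoreFirstLine() header option are illustrative only.

import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.api.java.tuple.Tuple3;
import org.apache.flink.graph.Graph;

public class ManualCsvGraphSketch {

	public static void main(String[] args) throws Exception {
		ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

		// vertex lines: "id,value"; edge lines: "source,target,weight" (placeholder paths and types)
		DataSet<Tuple2<Long, String>> vertices = env.readCsvFile("/tmp/vertices.csv")
				.types(Long.class, String.class);

		// ignoreFirstLine() skips a header row, the kind of CsvReader option the class Javadoc above refers to
		DataSet<Tuple3<Long, Long, Double>> edges = env.readCsvFile("/tmp/edges.csv")
				.ignoreFirstLine()
				.types(Long.class, Long.class, Double.class);

		// the proposed GraphCsvReader wraps these two reads plus the fromTupleDataSet call
		Graph<Long, String, Double> graph = Graph.fromTupleDataSet(vertices, edges, env);
		System.out.println("vertices: " + graph.numberOfVertices());
	}
}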
--
This message was sent by Atlassian JIRA
(v6.3.4#6332)