[ https://issues.apache.org/jira/browse/FLINK-3115?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=15200347#comment-15200347 ]
ASF GitHub Bot commented on FLINK-3115:
---------------------------------------
Github user smarthi commented on a diff in the pull request:
https://github.com/apache/flink/pull/1792#discussion_r56575375
--- Diff: flink-streaming-connectors/flink-connector-elasticsearch2/src/test/java/org/apache/flink/streaming/connectors/elasticsearch2/ElasticsearchSinkITCase.java ---
@@ -0,0 +1,211 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.flink.streaming.connectors.elasticsearch2;
+
+import org.apache.flink.api.common.functions.RuntimeContext;
+import org.apache.flink.api.java.tuple.Tuple2;
+import org.apache.flink.runtime.client.JobExecutionException;
+import org.apache.flink.streaming.api.datastream.DataStreamSource;
+import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
+import org.apache.flink.streaming.api.functions.source.SourceFunction;
+import org.apache.flink.streaming.util.StreamingMultipleProgramsTestBase;
+import org.elasticsearch.action.get.GetRequest;
+import org.elasticsearch.action.get.GetResponse;
+import org.elasticsearch.action.index.IndexRequest;
+import org.elasticsearch.client.Client;
+import org.elasticsearch.client.Requests;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.node.Node;
+import org.elasticsearch.node.NodeBuilder;
+import org.junit.Assert;
+import org.junit.ClassRule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+
+import java.io.File;
+import java.net.InetSocketAddress;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+public class ElasticsearchSinkITCase extends StreamingMultipleProgramsTestBase {
+
+    private static final int NUM_ELEMENTS = 20;
+
+    @ClassRule
+    public static TemporaryFolder tempFolder = new TemporaryFolder();
+
+    @Test
+    public void testNodeClient() throws Exception {
+
+        File dataDir = tempFolder.newFolder();
+
+        Node node = NodeBuilder.nodeBuilder()
+                .settings(Settings.settingsBuilder()
+                        .put("path.home", dataDir.getParent())
+                        .put("http.enabled", false)
+                        .put("path.data", dataDir.getAbsolutePath()))
+                // set a custom cluster name to verify that user config works correctly
+                .clusterName("my-node-client-cluster")
+                .local(true)
+                .node();
+
+        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
+
+        DataStreamSource<Tuple2<Integer, String>> source = env.addSource(new TestSourceFunction());
+
+        Map<String, String> config = new HashMap<>();
+        // This instructs the sink to emit after every element, otherwise they would be buffered
+        config.put(ElasticsearchSink.CONFIG_KEY_BULK_FLUSH_MAX_ACTIONS, "1");
+        config.put("cluster.name", "my-node-client-cluster");
+
+        // connect to our local node
+        config.put("node.local", "true");
+
+        // need this with ElasticSearch v2.x
+        config.put("path.home", dataDir.getParent());
+
+        source.addSink(new ElasticsearchSink<>(config, new TestElasticsearchSinkFunction()));
+
+        env.execute("Elasticsearch Node Client Test");
+
+        // verify the results
+        Client client = node.client();
+        for (int i = 0; i < NUM_ELEMENTS; i++) {
+            GetResponse response = client.get(
+                    new GetRequest("my-index", "my-type", Integer.toString(i))).actionGet();
+            Assert.assertEquals("message #" + i, response.getSource().get("data"));
+        }
+
+        node.close();
+    }
+
+    @Test
+    public void testTransportClient() throws Exception {
+
+        File dataDir = tempFolder.newFolder();
+
+        Node node = NodeBuilder.nodeBuilder()
+                .settings(Settings.settingsBuilder()
+                        .put("path.home", dataDir.getParent())
+                        .put("http.enabled", false)
+                        .put("path.data", dataDir.getAbsolutePath()))
+                // set a custom cluster name to verify that user config works correctly
+                .clusterName("my-transport-client-cluster")
+                .local(true)
+                .node();
+
+        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
+
+        DataStreamSource<Tuple2<Integer, String>> source = env.addSource(new TestSourceFunction());
+
+        Map<String, String> config = new HashMap<>();
+        // This instructs the sink to emit after every element, otherwise they would be buffered
+        config.put(ElasticsearchSink.CONFIG_KEY_BULK_FLUSH_MAX_ACTIONS, "1");
+        config.put("cluster.name", "my-transport-client-cluster");
+
+        // need this with ElasticSearch v2.x
+        config.put("path.home", dataDir.getParent());
+
+        // connect to our local node
+        config.put("node.local", "true");
+
+        // Can't use {@link TransportAddress} as it's not Serializable in Elasticsearch 2.x
+        List<InetSocketAddress> transports = new ArrayList<>();
+        transports.add(new InetSocketAddress(9300));
+
+        source.addSink(new ElasticsearchSink<>(config, null, new TestElasticsearchSinkFunction()));
--- End diff ---
Thanks @zcox. The fix works; will update the PR.
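For context, the review thread is anchored on the last line of the excerpt, where the {{transports}} list built a few lines earlier is not handed to the sink. Below is a minimal sketch of what the intended wiring presumably looks like, reusing the {{config}}, {{source}}, and {{TestElasticsearchSinkFunction}} names from the test above; the loopback hostname is an assumption, since the excerpt builds the address from a port alone.

// Hedged sketch: pass the transport addresses to the sink instead of null.
// "127.0.0.1" is an assumption; the diff above constructs the InetSocketAddress
// from just the port, which targets the wildcard address.
List<InetSocketAddress> transports = new ArrayList<>();
transports.add(new InetSocketAddress("127.0.0.1", 9300));

source.addSink(new ElasticsearchSink<>(config, transports, new TestElasticsearchSinkFunction()));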
> Update Elasticsearch connector to 2.X
> -------------------------------------
>
> Key: FLINK-3115
> URL: https://issues.apache.org/jira/browse/FLINK-3115
> Project: Flink
> Issue Type: Improvement
> Components: Streaming Connectors
> Affects Versions: 0.10.0, 1.0.0, 0.10.1
> Reporter: Maximilian Michels
> Assignee: Suneel Marthi
> Fix For: 1.0.0
>
>
> The Elasticsearch connector is no longer up to date. In version 2.x the API
> changed, so the code needs to be adapted. It probably makes sense to add a new
> class {{ElasticsearchSink2}}.
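
To make the intended 2.x usage concrete, here is a minimal, hedged sketch modeled on the integration test quoted above. It assumes the {{ElasticsearchSinkFunction}}/{{RequestIndexer}} callback style and the constructor taking a {{Map}} config plus a list of {{InetSocketAddress}}, as exposed by the new {{flink-connector-elasticsearch2}} module added in the PR; the cluster name, host, port, and index/type names are placeholders.

import org.apache.flink.api.common.functions.RuntimeContext;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.elasticsearch2.ElasticsearchSink;
import org.apache.flink.streaming.connectors.elasticsearch2.ElasticsearchSinkFunction;
import org.apache.flink.streaming.connectors.elasticsearch2.RequestIndexer;
import org.elasticsearch.client.Requests;

import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class Elasticsearch2SinkExample {

    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        DataStream<String> input = env.fromElements("message #0", "message #1");

        Map<String, String> config = new HashMap<>();
        // flush after every element so documents become visible immediately
        config.put(ElasticsearchSink.CONFIG_KEY_BULK_FLUSH_MAX_ACTIONS, "1");
        config.put("cluster.name", "my-cluster");

        // transport endpoints of the Elasticsearch 2.x cluster (placeholder host/port)
        List<InetSocketAddress> transports = new ArrayList<>();
        transports.add(new InetSocketAddress("127.0.0.1", 9300));

        input.addSink(new ElasticsearchSink<>(config, transports, new ElasticsearchSinkFunction<String>() {
            @Override
            public void process(String element, RuntimeContext ctx, RequestIndexer indexer) {
                // build one index request per element, mirroring the "data" field used by the test
                Map<String, Object> json = new HashMap<>();
                json.put("data", element);
                indexer.add(Requests.indexRequest()
                        .index("my-index")
                        .type("my-type")
                        .source(json));
            }
        }));

        env.execute("Elasticsearch 2.x Sink Example");
    }
}

Setting {{CONFIG_KEY_BULK_FLUSH_MAX_ACTIONS}} to 1 disables batching, which is what the tests above rely on to see each document right after it is emitted; production jobs would normally use a larger value.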
--
This message was sent by Atlassian JIRA
(v6.3.4#6332)