This is an automated email from the ASF dual-hosted git repository.

nnag pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/geode-kafka-connector.git

commit da1bfb67f536e22c2739944496df0c7512af42ba
Author: Jason Huynh <[email protected]>
AuthorDate: Wed Jan 15 15:10:24 2020 -0800

    added log4j and able to stand up geode/kafka/zookeeper
---
 build.gradle                                    |  5 +++++
 src/main/java/kafka/GeodeKafkaSource.java       |  1 +
 src/main/java/kafka/GeodeKafkaSourceTask.java   |  2 +-
 src/test/java/kafka/GeodeKafkaTestCluster.java  |  2 +-
 src/test/java/kafka/LocatorLauncherWrapper.java |  4 +++-
 src/test/java/kafka/ServerLauncherWrapper.java  | 12 ++++++++++--
 6 files changed, 21 insertions(+), 5 deletions(-)

diff --git a/build.gradle b/build.gradle
index d694dfa..9e23d87 100644
--- a/build.gradle
+++ b/build.gradle
@@ -15,10 +15,15 @@ dependencies {
 
     compile 'org.apache.geode:geode-core:1.11.0'
     compile(group: 'org.apache.kafka', name: 'connect-api', version: '2.3.1')
+    compile group: 'org.apache.logging.log4j', name: 'log4j-api', version: 
'2.13.0'
+    compile group: 'org.apache.logging.log4j', name: 'log4j-core', version: 
'2.13.0'
+
+
     testCompile(group: 'org.apache.kafka', name: 'kafka_2.12', version: 
'2.3.1')
     testCompile(group: 'org.apache.kafka', name: 'kafka-streams-test-utils', 
version: '1.1.0')
     testCompile(group: 'org.apache.curator', name: 'curator-framework', 
version: '4.2.0')
     testCompile(group: 'org.apache.kafka', name: 'connect-runtime', version: 
'2.3.1')
 
     testCompile group: 'junit', name: 'junit', version: '4.12'
+
 }
diff --git a/src/main/java/kafka/GeodeKafkaSource.java 
b/src/main/java/kafka/GeodeKafkaSource.java
index d1599e9..8b88e81 100644
--- a/src/main/java/kafka/GeodeKafkaSource.java
+++ b/src/main/java/kafka/GeodeKafkaSource.java
@@ -27,6 +27,7 @@ public class GeodeKafkaSource extends SourceConnector {
 
   @Override
   public List<Map<String, String>> taskConfigs(int maxTasks) {
+    System.out.println("GKSource: taskConfigs");
     List<Map<String, String>> taskConfigs = new ArrayList<>();
     Map<String, String> taskProps = new HashMap<>();
 
diff --git a/src/main/java/kafka/GeodeKafkaSourceTask.java 
b/src/main/java/kafka/GeodeKafkaSourceTask.java
index 6492199..e6aa578 100644
--- a/src/main/java/kafka/GeodeKafkaSourceTask.java
+++ b/src/main/java/kafka/GeodeKafkaSourceTask.java
@@ -49,7 +49,7 @@ public class GeodeKafkaSourceTask extends SourceTask {
     offset = new HashMap<>();
     offset.put("OFFSET", 0L);
 
-    installOnGeode("localHost", 18888, "someRegion");
+    installOnGeode("localHost", 10334, "someRegion");
   }
 
   @Override
diff --git a/src/test/java/kafka/GeodeKafkaTestCluster.java 
b/src/test/java/kafka/GeodeKafkaTestCluster.java
index fbaba2c..824b148 100644
--- a/src/test/java/kafka/GeodeKafkaTestCluster.java
+++ b/src/test/java/kafka/GeodeKafkaTestCluster.java
@@ -39,7 +39,7 @@ public class GeodeKafkaTestCluster {
   }
 
   private ClientCache createGeodeClient() {
-    return new ClientCacheFactory().addPoolLocator("127.0.0.1", 
10334).create();
+    return new ClientCacheFactory().addPoolLocator("localhost", 
10334).create();
   }
 
   private static void startZooKeeper() throws IOException, 
QuorumPeerConfig.ConfigException {
diff --git a/src/test/java/kafka/LocatorLauncherWrapper.java 
b/src/test/java/kafka/LocatorLauncherWrapper.java
index fe351be..b4340c4 100644
--- a/src/test/java/kafka/LocatorLauncherWrapper.java
+++ b/src/test/java/kafka/LocatorLauncherWrapper.java
@@ -16,8 +16,10 @@ public class LocatorLauncherWrapper {
 //        String statsFile = new File(context.getOutputDir(), 
"stats.gfs").getAbsolutePath();
 //        
properties.setProperty(ConfigurationProperties.STATISTIC_ARCHIVE_FILE, 
statsFile);
         properties.setProperty(ConfigurationProperties.NAME, "locator1");
-        Locator.startLocatorAndDS(10334, null, properties);
+        Locator.startLocatorAndDS(10334, new 
File("/Users/jhuynh/Pivotal/geode-kafka-connector/"), properties);
+        while (true) {
 
+        }
 //
 //        LocatorLauncher locatorLauncher  = new LocatorLauncher.Builder()
 //                .setMemberName("locator1")
diff --git a/src/test/java/kafka/ServerLauncherWrapper.java 
b/src/test/java/kafka/ServerLauncherWrapper.java
index 68161c2..7493d6b 100644
--- a/src/test/java/kafka/ServerLauncherWrapper.java
+++ b/src/test/java/kafka/ServerLauncherWrapper.java
@@ -2,6 +2,7 @@ package kafka;
 
 import org.apache.geode.cache.Cache;
 import org.apache.geode.cache.CacheFactory;
+import org.apache.geode.cache.RegionShortcut;
 import org.apache.geode.cache.server.CacheServer;
 import org.apache.geode.distributed.ConfigurationProperties;
 import org.apache.geode.distributed.ServerLauncher;
@@ -36,13 +37,20 @@ public class ServerLauncherWrapper {
                 .set(ConfigurationProperties.LOCATORS, locatorString)
                 .set(ConfigurationProperties.NAME,
                         "server-1")
-                .set(ConfigurationProperties.LOG_FILE, 
"/Users/jhuynh/Pivotal/geode-kafka-connector/server1.log")
+                .set(ConfigurationProperties.LOG_FILE, 
"/Users/jhuynh/Pivotal/geode-kafka-connector/")
                 .set(ConfigurationProperties.LOG_LEVEL, "info")
 //               .set(ConfigurationProperties.STATISTIC_ARCHIVE_FILE, 
statsFile)
                 .create();
         CacheServer cacheServer = cache.addCacheServer();
         cacheServer.setPort(0);
-        cacheServer.setMaxConnections(Integer.MAX_VALUE);
+//        cacheServer.setMaxConnections(Integer.MAX_VALUE);
         cacheServer.start();
+
+        //create the region
+        
cache.createRegionFactory(RegionShortcut.PARTITION).create("someRegion");
+        System.out.println("starting cacheserver");
+        while (true) {
+
+        }
     }
 }

Reply via email to