zyxxoo commented on code in PR #2615:
URL: https://github.com/apache/incubator-hugegraph/pull/2615#discussion_r1759741851


##########
hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/config/AbstractConfig.java:
##########
@@ -0,0 +1,84 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.ct.config;
+
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.hugegraph.ct.base.HGTestLogger;
+import org.slf4j.Logger;
+
+public abstract class AbstractConfig {
+
+    protected static final Logger LOG = HGTestLogger.LOG;
+    protected String config;
+    protected Map<String, String> properties = new HashMap<>();
+    protected String fileName;
+
+    protected void readTemplate(String filePath) {
+        try {
+            this.config = new String(Files.readAllBytes(Paths.get(filePath)));
+        } catch (IOException e) {
+            LOG.error("failed to get file", e);
+        }
+    }
+
+    protected void updateConfigs() {
+        for (Map.Entry<String, String> entry : properties.entrySet()) {
+            String placeholder = "$" + entry.getKey() + "$";
+            config = config.replace(placeholder, entry.getValue());

Review Comment:
   Writing it by hand works too, but I'm curious why something like FreeMarker wasn't considered for the template rendering?
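
   A rough sketch of what the FreeMarker variant could look like (assuming a freemarker test dependency is acceptable; templates would then use ${GRPC_PORT}-style placeholders instead of $GRPC_PORT$):

   ```java
   import java.io.StringReader;
   import java.io.StringWriter;
   import java.util.Map;

   import freemarker.template.Configuration;
   import freemarker.template.Template;

   public final class TemplateRenderer {

       public static String render(String templateText, Map<String, String> props) throws Exception {
           Configuration cfg = new Configuration(Configuration.VERSION_2_3_31);
           // build the template from the in-memory string that readTemplate() loaded
           Template template = new Template("config", new StringReader(templateText), cfg);
           StringWriter out = new StringWriter();
           // substitutes ${KEY} with props.get("KEY"), failing fast on missing keys
           template.process(props, out);
           return out.toString();
       }
   }
   ```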



##########
hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/config/GraphConfig.java:
##########
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.ct.config;
+
+import static org.apache.hugegraph.ct.base.ClusterConstant.CT_PACKAGE_PATH;
+import static org.apache.hugegraph.ct.base.ClusterConstant.GRAPH_TEMPLATE_FILE;
+import static org.apache.hugegraph.ct.base.ClusterConstant.HUGEGRAPH_PROPERTIES;
+
+import java.util.List;
+import java.util.stream.Collectors;
+
+public class GraphConfig extends AbstractConfig {
+
+    public GraphConfig() {
+        readTemplate(CT_PACKAGE_PATH + GRAPH_TEMPLATE_FILE);

Review Comment:
   DI
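
   If "DI" here means injecting the template location instead of hard-coding it, a minimal sketch (keeping the current path as the default; the rest of the constructor is assumed unchanged):

   ```java
   public GraphConfig() {
       this(CT_PACKAGE_PATH + GRAPH_TEMPLATE_FILE);   // current behaviour stays the default
   }

   public GraphConfig(String templatePath) {          // tests can inject a different template
       readTemplate(templatePath);
       // remaining initialisation as in the original constructor
   }
   ```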



##########
hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/config/AbstractConfig.java:
##########
@@ -0,0 +1,84 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.ct.config;
+
+import java.io.File;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.hugegraph.ct.base.HGTestLogger;
+import org.slf4j.Logger;
+
+public abstract class AbstractConfig {
+
+    protected static final Logger LOG = HGTestLogger.LOG;
+    protected String config;
+    protected Map<String, String> properties = new HashMap<>();
+    protected String fileName;
+
+    protected void readTemplate(String filePath) {
+        try {
+            this.config = new String(Files.readAllBytes(Paths.get(filePath)));
+        } catch (IOException e) {
+            LOG.error("failed to get file", e);
+        }
+    }
+
+    protected void updateConfigs() {
+        for (Map.Entry<String, String> entry : properties.entrySet()) {
+            String placeholder = "$" + entry.getKey() + "$";
+            config = config.replace(placeholder, entry.getValue());
+        }
+    }
+
+    public void writeConfig(String filePath) {
+        updateConfigs();
+        String destPath = filePath + File.separator + fileName;

Review Comment:
   Paths.get
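
   i.e. something like the following, keeping the FileWriter that is already imported (error handling omitted in this sketch):

   ```java
   Path destPath = Paths.get(filePath, fileName);     // lets java.nio handle the separator
   try (FileWriter writer = new FileWriter(destPath.toFile())) {
       writer.write(config);
   }
   ```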



##########
hugegraph-cluster-test/hugegraph-clustertest-test/src/main/java/org/apache/hugegraph/MultiClusterTest/BaseMultiClusterTest.java:
##########
@@ -0,0 +1,60 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.MultiClusterTest;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+
+import org.apache.hugegraph.ct.env.BaseEnv;
+import org.apache.hugegraph.ct.env.MultiNodeEnv;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+
+public class BaseMultiClusterTest {
+
+    protected static BaseEnv env;
+
+    protected static Process p;
+
+    @BeforeClass
+    public static void initEnv() throws InterruptedException {
+        env = new MultiNodeEnv();
+        env.startCluster();
+    }
+
+    @AfterClass
+    public static void clearEnv() throws InterruptedException {
+        env.clearCluster();
+        Thread.sleep(2000);
+    }
+
+    protected String execCurl(String[] cmds) throws IOException {

Review Comment:
   execCmd
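
   A more generic shape could look like this (the body below is only illustrative, since the original implementation is not shown in this hunk):

   ```java
   protected String execCmd(String... cmds) throws IOException {
       Process process = new ProcessBuilder(cmds).redirectErrorStream(true).start();
       StringBuilder output = new StringBuilder();
       try (BufferedReader reader = new BufferedReader(
               new InputStreamReader(process.getInputStream()))) {
           String line;
           while ((line = reader.readLine()) != null) {
               output.append(line);
           }
       }
       return output.toString();
   }
   ```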



##########
hugegraph-cluster-test/hugegraph-clustertest-test/src/main/java/org/apache/hugegraph/MultiClusterTest/MultiClusterDeployTest.java:
##########
@@ -0,0 +1,72 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.MultiClusterTest;
+
+import java.io.IOException;
+import java.util.List;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+public class MultiClusterDeployTest extends BaseMultiClusterTest {
+
+    @Test
+    public void testPDNodesDeployment() throws IOException {
+        List<String> addrs = env.getPDRestAddrs();
+        for (String addr : addrs) {
+            String url = addr;
+            String[] cmds = {"curl", url};
+            StringBuffer sb = new StringBuffer();
+            for (int i = 0; i < cmds.length; i++) {
+                sb.append(cmds[i] + " ");
+            }
+            String responseMsg = execCurl(cmds);
+            Assert.assertEquals(responseMsg, "");
+        }
+    }
+
+    @Test
+    public void testStoreNodesDeployment() throws IOException, InterruptedException {
+        List<String> addrs = env.getStoreRestAddrs();
+        for (String addr : addrs) {
+            String url = addr;
+            String[] cmds = {"curl", url};
+            StringBuffer sb = new StringBuffer();
+            for (int i = 0; i < cmds.length; i++) {
+                sb.append(cmds[i] + " ");
+            }
+            String responseMsg = execCurl(cmds);
+            Assert.assertTrue(responseMsg.startsWith("{"));
+        }
+    }
+
+    @Test
+    public void testServerNodesDeployment() throws IOException, InterruptedException {
+        List<String> addrs = env.getServerRestAddrs();
+        for (String addr : addrs) {
+            String url = addr;
+            String[] cmds = {"curl", url};
+            StringBuffer sb = new StringBuffer();
+            for (int i = 0; i < cmds.length; i++) {
+                sb.append(cmds[i] + " ");
+            }
+            String responseMsg = execCurl(cmds);

Review Comment:
   It would be better to validate the data as well; the returned JSON could be parsed and checked.
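
   For example with Jackson (assuming jackson-databind is on the test classpath; imports are com.fasterxml.jackson.databind.ObjectMapper / JsonNode, and the commented field name is only a placeholder):

   ```java
   ObjectMapper mapper = new ObjectMapper();
   JsonNode root = mapper.readTree(responseMsg);   // fails the test on malformed JSON
   Assert.assertTrue(root.isObject());
   // once the expected payload is fixed, assert on concrete fields, e.g.:
   // Assert.assertNotNull(root.get("apiVersion"));   // hypothetical field name
   ```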



##########
hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/config/PDConfig.java:
##########
@@ -0,0 +1,92 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.ct.config;
+
+import static org.apache.hugegraph.ct.base.ClusterConstant.APPLICATION_FILE;
+import static org.apache.hugegraph.ct.base.ClusterConstant.CT_PACKAGE_PATH;
+import static org.apache.hugegraph.ct.base.ClusterConstant.PD_TEMPLATE_FILE;
+import static org.apache.hugegraph.ct.base.EnvUtil.getAvailablePort;
+
+import java.util.List;
+import java.util.stream.Collectors;
+
+import lombok.Getter;
+
+public class PDConfig extends AbstractConfig {
+
+    @Getter
+    private final int raftPort, grpcPort, restPort;
+    @Getter
+    private String grpcHost, dataPath, raftHost;
+
+    public PDConfig() {
+        readTemplate(CT_PACKAGE_PATH + PD_TEMPLATE_FILE);
+        this.fileName = APPLICATION_FILE;
+        this.grpcHost = "127.0.0.1";
+        this.raftHost = "127.0.0.1";
+        this.dataPath = "./pd_data";
+        this.raftPort = getAvailablePort();
+        this.grpcPort = getAvailablePort();
+        this.restPort = getAvailablePort();
+        properties.put("GRPC_HOST", grpcHost);
+        properties.put("GRPC_PORT", String.valueOf(grpcPort));
+        properties.put("REST_PORT", String.valueOf(restPort));
+        properties.put("PD_DATA_PATH", dataPath);

Review Comment:
   DI; these could be kept as the default parameter values.
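
   A sketch of keeping these values as overridable defaults (the rest of the constructor stays as it is):

   ```java
   public PDConfig() {
       this("127.0.0.1", "./pd_data");                // current values remain the defaults
   }

   public PDConfig(String host, String dataPath) {    // callers/tests may override them
       readTemplate(CT_PACKAGE_PATH + PD_TEMPLATE_FILE);
       this.fileName = APPLICATION_FILE;
       this.grpcHost = host;
       this.raftHost = host;
       this.dataPath = dataPath;
       this.raftPort = getAvailablePort();
       this.grpcPort = getAvailablePort();
       this.restPort = getAvailablePort();
       properties.put("GRPC_HOST", grpcHost);
       properties.put("GRPC_PORT", String.valueOf(grpcPort));
       properties.put("REST_PORT", String.valueOf(restPort));
       properties.put("PD_DATA_PATH", dataPath);
       // remaining properties as in the original constructor
   }
   ```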



##########
hugegraph-cluster-test/hugegraph-clustertest-minicluster/src/main/java/org/apache/hugegraph/ct/env/AbstractEnv.java:
##########
@@ -0,0 +1,179 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.ct.env;
+
+import static org.apache.hugegraph.ct.base.ClusterConstant.CONF_DIR;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hugegraph.ct.base.HGTestLogger;
+import org.apache.hugegraph.ct.config.ClusterConf;
+import org.apache.hugegraph.ct.config.GraphConfig;
+import org.apache.hugegraph.ct.config.PDConfig;
+import org.apache.hugegraph.ct.config.ServerConfig;
+import org.apache.hugegraph.ct.config.StoreConfig;
+import org.apache.hugegraph.ct.node.PDNodeWrapper;
+import org.apache.hugegraph.ct.node.ServerNodeWrapper;
+import org.apache.hugegraph.ct.node.StoreNodeWrapper;
+import org.slf4j.Logger;
+
+import lombok.Setter;
+import lombok.extern.slf4j.Slf4j;
+
+@Slf4j
+public abstract class AbstractEnv implements BaseEnv {
+
+    private static final Logger LOG = HGTestLogger.LOG;
+    protected ClusterConf clusterConf;
+    protected List<PDNodeWrapper> pdNodeWrappers;
+    protected List<ServerNodeWrapper> serverNodeWrappers;
+    protected List<StoreNodeWrapper> storeNodeWrappers;
+    @Setter
+    protected int cluster_id = 0;
+
+    protected AbstractEnv() {
+        this.pdNodeWrappers = new ArrayList<>();
+        this.serverNodeWrappers = new ArrayList<>();
+        this.storeNodeWrappers = new ArrayList<>();
+    }
+
+    protected void init(int pdCnt, int storeCnt, int serverCnt) {
+        this.clusterConf = new ClusterConf(pdCnt, storeCnt, serverCnt);
+        for (int i = 0; i < pdCnt; i++) {
+            PDNodeWrapper pdNodeWrapper = new PDNodeWrapper(cluster_id, i);
+            PDConfig pdConfig = clusterConf.getPDConfig(i);
+            pdNodeWrappers.add(pdNodeWrapper);
+            pdConfig.writeConfig(pdNodeWrapper.getNodePath()
+                                 + CONF_DIR);
+        }
+
+        for (int i = 0; i < storeCnt; i++) {
+            StoreNodeWrapper storeNodeWrapper = new StoreNodeWrapper(cluster_id, i);
+            StoreConfig storeConfig = clusterConf.getStoreConfig(i);
+            storeNodeWrappers.add(storeNodeWrapper);
+            storeConfig.writeConfig(storeNodeWrapper.getNodePath()
+                                    + CONF_DIR);
+        }
+
+        for (int i = 0; i < serverCnt; i++) {
+            ServerNodeWrapper serverNodeWrapper = new ServerNodeWrapper(cluster_id, i);
+            serverNodeWrappers.add(serverNodeWrapper);
+            ServerConfig serverConfig = clusterConf.getServerConfig(i);
+            serverConfig.setServerID(serverNodeWrapper.getID());
+            GraphConfig graphConfig = clusterConf.getGraphConfig(i);
+            if (i == 0) {
+                serverConfig.setRole("master");
+            } else {
+                serverConfig.setRole("worker");
+            }
+            serverConfig.writeConfig(serverNodeWrapper.getNodePath()
+                                     + CONF_DIR);
+            graphConfig.writeConfig(serverNodeWrapper.getNodePath()
+                                    + CONF_DIR);
+        }
+    }
+
+    public void startCluster() {
+        for (int i = 0; i < pdNodeWrappers.size(); i++) {
+            PDNodeWrapper pdNodeWrapper = pdNodeWrappers.get(i);
+            pdNodeWrapper.start();
+            while (!pdNodeWrapper.isStarted()) {
+                try {
+                    Thread.sleep(1000);
+                } catch (InterruptedException e) {
+                    throw new RuntimeException(e);
+                }
+            }
+        }
+        for (int i = 0; i < storeNodeWrappers.size(); i++) {
+            StoreNodeWrapper storeNodeWrapper = storeNodeWrappers.get(i);
+            storeNodeWrapper.start();
+            while (!storeNodeWrapper.isStarted()) {
+                try {
+                    Thread.sleep(1000);
+                } catch (InterruptedException e) {
+                    throw new RuntimeException(e);
+                }
+            }
+        }
+        for (ServerNodeWrapper serverNodeWrapper : serverNodeWrappers) {
+            serverNodeWrapper.start();
+            while (!serverNodeWrapper.isStarted()) {
+                try {
+                    Thread.sleep(1000);
+                } catch (InterruptedException e) {
+                    throw new RuntimeException(e);
+                }
+            }
+        }
+    }
+
+    public void clearCluster() {

Review Comment:
   stopCluster?
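
   If the intent is only to stop the processes (and keep the cleanup separate), a rough sketch, assuming the node wrappers expose a stop() method (not visible in this hunk):

   ```java
   public void stopCluster() {
       pdNodeWrappers.forEach(PDNodeWrapper::stop);       // stop() is assumed, not shown here
       storeNodeWrappers.forEach(StoreNodeWrapper::stop);
       serverNodeWrappers.forEach(ServerNodeWrapper::stop);
   }
   ```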



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
