This is an automated email from the ASF dual-hosted git repository.

jin pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-hugegraph.git

commit bd1d9db77b5d263f4dc0f8cc38198f5f26b65cb8
Author: VGalaxies <[email protected]>
AuthorDate: Thu Apr 4 00:00:29 2024 +0800

    feat(pd): integrate `pd-test` submodule
    
    prepare tests for `pd-common`
---
 hugegraph-pd/hg-pd-test/pom.xml                    | 259 ++++++++++++++
 .../apache/hugegraph/pd/common/BaseCommonTest.java |  34 ++
 .../hugegraph/pd/common/CommonSuiteTest.java       |  36 ++
 .../apache/hugegraph/pd/common/HgAssertTest.java   | 132 +++++++
 .../org/apache/hugegraph/pd/common/KVPairTest.java |  72 ++++
 .../hugegraph/pd/common/PartitionCacheTest.java    | 388 +++++++++++++++++++++
 .../hugegraph/pd/common/PartitionUtilsTest.java    |  54 +++
 .../hg-pd-test/src/main/resources/log4j2.xml       | 139 ++++++++
 8 files changed, 1114 insertions(+)

diff --git a/hugegraph-pd/hg-pd-test/pom.xml b/hugegraph-pd/hg-pd-test/pom.xml
new file mode 100644
index 000000000..31c0fd889
--- /dev/null
+++ b/hugegraph-pd/hg-pd-test/pom.xml
@@ -0,0 +1,259 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+  -->
+
+<project xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xmlns="http://maven.apache.org/POM/4.0.0"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <groupId>org.apache.hugegraph</groupId>
+        <artifactId>hugegraph-pd</artifactId>
+        <version>${revision}</version>
+        <relativePath>../pom.xml</relativePath>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>hg-pd-test</artifactId>
+
+    <properties>
+        <skip.dump>true</skip.dump>
+        <!--maven.test.skip>true</maven.test.skip-->
+        <powermock.version>2.0.0-RC.3</powermock.version>
+    </properties>
+
+    <profiles>
+        <profile>
+            <id>jacoco</id>
+            <activation>
+                <activeByDefault>false</activeByDefault>
+            </activation>
+            <build>
+                <plugins>
+                    <plugin>
+                        <groupId>org.jacoco</groupId>
+                        <artifactId>jacoco-maven-plugin</artifactId>
+                        <version>0.8.4</version>
+                        <configuration>
+                            <excludes>
+                                <exclude>**/grpc/**.*</exclude>
+                                <exclude>**/config/**.*</exclude>
+                            </excludes>
+                        </configuration>
+                        <executions>
+                            <execution>
+                                <goals>
+                                    <goal>prepare-agent</goal>
+                                </goals>
+                            </execution>
+                        </executions>
+                    </plugin>
+                </plugins>
+            </build>
+        </profile>
+    </profiles>
+
+    <dependencies>
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <version>4.13.2</version>
+        </dependency>
+        <dependency>
+            <groupId>org.projectlombok</groupId>
+            <artifactId>lombok</artifactId>
+            <version>1.18.24</version>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework</groupId>
+            <artifactId>spring-context-support</artifactId>
+            <version>5.3.20</version>
+        </dependency>
+        <dependency>
+            <groupId>org.springframework</groupId>
+            <artifactId>spring-test</artifactId>
+            <version>5.3.20</version>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.logging.log4j</groupId>
+            <artifactId>log4j-slf4j-impl</artifactId>
+            <version>${log4j2.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.hugegraph</groupId>
+            <artifactId>hg-pd-common</artifactId>
+            <version>${revision}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>com.google.code.gson</groupId>
+            <artifactId>gson</artifactId>
+            <version>2.8.9</version>
+        </dependency>
+        <dependency>
+            <groupId>commons-io</groupId>
+            <artifactId>commons-io</artifactId>
+            <version>2.7</version>
+        </dependency>
+        <dependency>
+            <groupId>com.fasterxml.jackson.core</groupId>
+            <artifactId>jackson-databind</artifactId>
+            <version>2.13.0</version>
+        </dependency>
+        <dependency>
+            <groupId>com.fasterxml.jackson.core</groupId>
+            <artifactId>jackson-core</artifactId>
+            <version>2.13.0</version>
+        </dependency>
+        <dependency>
+            <groupId>com.fasterxml.jackson.core</groupId>
+            <artifactId>jackson-annotations</artifactId>
+            <version>2.13.0</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.springframework.boot</groupId>
+            <artifactId>spring-boot-starter-test</artifactId>
+            <version>2.5.14</version>
+            <exclusions>
+                <exclusion>
+                    <groupId>org.springframework.boot</groupId>
+                    <artifactId>spring-boot-starter-logging</artifactId>
+                </exclusion>
+            </exclusions>
+        </dependency>
+
+        <dependency>
+            <groupId>org.powermock</groupId>
+            <artifactId>powermock-classloading-xstream</artifactId>
+            <version>${powermock.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.powermock</groupId>
+            <artifactId>powermock-module-junit4-rule</artifactId>
+            <version>${powermock.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.powermock</groupId>
+            <artifactId>powermock-api-support</artifactId>
+            <version>${powermock.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.powermock</groupId>
+            <artifactId>powermock-module-junit4</artifactId>
+            <version>${powermock.version}</version>
+            <scope>compile</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.powermock</groupId>
+            <artifactId>powermock-api-mockito2</artifactId>
+            <version>${powermock.version}</version>
+            <scope>compile</scope>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.commons</groupId>
+            <artifactId>commons-lang3</artifactId>
+            <version>3.14.0</version>
+            <scope>compile</scope>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-surefire-plugin</artifactId>
+                <version>2.20</version>
+                <executions>
+                    <execution>
+                        <id>pd-common-test</id>
+                        <configuration>
+                            <testSourceDirectory>${basedir}/src/main/java/
+                            </testSourceDirectory>
+                            <testClassesDirectory>${basedir}/target/classes/
+                            </testClassesDirectory>
+                            <includes>
+                                <include>**/CommonSuiteTest.java</include>
+                            </includes>
+                        </configuration>
+                    </execution>
+                    <execution>
+                        <id>pd-client-test</id>
+                        <configuration>
+                            <testSourceDirectory>${basedir}/src/main/java/
+                            </testSourceDirectory>
+                            <testClassesDirectory>${basedir}/target/classes/
+                            </testClassesDirectory>
+                            <includes>
+                                <include>**/PDClientSuiteTest.java</include>
+                            </includes>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.jacoco</groupId>
+                <artifactId>jacoco-maven-plugin</artifactId>
+                <version>0.8.4</version>
+                <executions>
+                    <execution>
+                        <id>pre-test</id>
+                        <goals>
+                            <goal>prepare-agent</goal>
+                        </goals>
+                    </execution>
+                    <execution>
+                        <id>post-test</id>
+                        <phase>test</phase>
+                        <goals>
+                            <goal>report-aggregate</goal>
+                        </goals>
+                        <configuration>
+                            <outputDirectory>${basedir}/target/site/jacoco</outputDirectory>
+                        </configuration>
+                    </execution>
+                </executions>
+                <configuration>
+                    <excludes>
+                        <exclude>org/apache/hugegraph/pd/rest/*.class</exclude>
+                        <exclude>org/apache/hugegraph/pd/service/*.class</exclude>
+                        <exclude>org/apache/hugegraph/pd/model/*.class</exclude>
+                        <exclude>org/apache/hugegraph/pd/watch/*.class</exclude>
+                        <exclude>org/apache/hugegraph/pd/pulse/*.class</exclude>
+                        <exclude>org/apache/hugegraph/pd/license/*.class</exclude>
+                        <exclude>org/apache/hugegraph/pd/notice/*.class</exclude>
+                        <exclude>org/apache/hugegraph/pd/util/*.class</exclude>
+                        <exclude>org/apache/hugegraph/pd/metrics/*.class</exclude>
+                        <exclude>org/apache/hugegraph/pd/util/grpc/*.class</exclude>
+                        <exclude>org/apache/hugegraph/pd/boot/*.class</exclude>
+                        <exclude>org/apache/hugegraph/pd/grpc/**/*.class</exclude>
+                        <exclude>org/apache/hugegraph/pd/raft/*.class</exclude>
+                        <exclude>**/RaftKVStore.class</exclude>
+                    </excludes>
+                </configuration>
+            </plugin>
+        </plugins>
+        <resources>
+            <resource>
+                <directory>src/main/resources/</directory>
+                <filtering>true</filtering>
+            </resource>
+        </resources>
+    </build>
+</project>
diff --git a/hugegraph-pd/hg-pd-test/src/main/java/org/apache/hugegraph/pd/common/BaseCommonTest.java b/hugegraph-pd/hg-pd-test/src/main/java/org/apache/hugegraph/pd/common/BaseCommonTest.java
new file mode 100644
index 000000000..fb4478e3d
--- /dev/null
+++ b/hugegraph-pd/hg-pd-test/src/main/java/org/apache/hugegraph/pd/common/BaseCommonTest.java
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.pd.common;
+
+import org.junit.After;
+import org.junit.BeforeClass;
+
+public class BaseCommonTest {
+
+    @BeforeClass
+    public static void init() {
+
+    }
+
+    @After
+    public void teardown() {
+        // pass
+    }
+}
diff --git a/hugegraph-pd/hg-pd-test/src/main/java/org/apache/hugegraph/pd/common/CommonSuiteTest.java b/hugegraph-pd/hg-pd-test/src/main/java/org/apache/hugegraph/pd/common/CommonSuiteTest.java
new file mode 100644
index 000000000..02a5dfca6
--- /dev/null
+++ b/hugegraph-pd/hg-pd-test/src/main/java/org/apache/hugegraph/pd/common/CommonSuiteTest.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.pd.common;
+
+import org.junit.runner.RunWith;
+import org.junit.runners.Suite;
+
+import lombok.extern.slf4j.Slf4j;
+
+@RunWith(Suite.class)
[email protected]({
+        PartitionUtilsTest.class,
+        PartitionCacheTest.class,
+        HgAssertTest.class,
+        KVPairTest.class,
+})
+
+@Slf4j
+public class CommonSuiteTest {
+
+}
diff --git a/hugegraph-pd/hg-pd-test/src/main/java/org/apache/hugegraph/pd/common/HgAssertTest.java b/hugegraph-pd/hg-pd-test/src/main/java/org/apache/hugegraph/pd/common/HgAssertTest.java
new file mode 100644
index 000000000..3e61dd0a9
--- /dev/null
+++ b/hugegraph-pd/hg-pd-test/src/main/java/org/apache/hugegraph/pd/common/HgAssertTest.java
@@ -0,0 +1,132 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.pd.common;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+
+import org.junit.Test;
+
+public class HgAssertTest {
+
+    @Test(expected = IllegalArgumentException.class)
+    public void testIsTrue() {
+        HgAssert.isTrue(false, "");
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void testIsTrue2() {
+        HgAssert.isTrue(true, null);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void testIsFalse() {
+        HgAssert.isFalse(true, "");
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void testIsFalse2() {
+        HgAssert.isTrue(false, null);
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void isArgumentValid() {
+        HgAssert.isArgumentValid(new byte[0], "");
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void isArgumentValidStr() {
+        HgAssert.isArgumentValid("", "");
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void testIsArgumentNotNull() {
+        HgAssert.isArgumentNotNull(null, "");
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void testIstValid() {
+        HgAssert.istValid(new byte[0], "");
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void testIstValidStr() {
+        HgAssert.isValid("", "");
+    }
+
+    @Test(expected = IllegalArgumentException.class)
+    public void testIsNotNull() {
+        HgAssert.isNotNull(null, "");
+    }
+
+    @Test
+    public void testIsInvalid() {
+        assertFalse(HgAssert.isInvalid("abc", "test"));
+        assertTrue(HgAssert.isInvalid("", null));
+    }
+
+    @Test
+    public void testIsInvalidByte() {
+        assertTrue(HgAssert.isInvalid(new byte[0]));
+        assertFalse(HgAssert.isInvalid(new byte[1]));
+    }
+
+    @Test
+    public void testIsInvalidMap() {
+        assertTrue(HgAssert.isInvalid(new HashMap<Integer, Integer>()));
+        assertFalse(HgAssert.isInvalid(new HashMap<Integer, Integer>() {{
+            put(1, 1);
+        }}));
+    }
+
+    @Test
+    public void testIsInvalidCollection() {
+        assertTrue(HgAssert.isInvalid(new ArrayList<Integer>()));
+        assertFalse(HgAssert.isInvalid(new ArrayList<Integer>() {{
+            add(1);
+        }}));
+    }
+
+    @Test
+    public void testIsContains() {
+        assertTrue(HgAssert.isContains(new Object[]{Integer.valueOf(1), Long.valueOf(2)},
+                                       Long.valueOf(2)));
+        assertFalse(HgAssert.isContains(new Object[]{Integer.valueOf(1), Long.valueOf(2)},
+                                        Long.valueOf(3)));
+    }
+
+    @Test
+    public void testIsContainsT() {
+        assertTrue(HgAssert.isContains(new ArrayList<>() {{
+            add(1);
+        }}, 1));
+        assertFalse(HgAssert.isContains(new ArrayList<>() {{
+            add(1);
+        }}, 2));
+    }
+
+    @Test
+    public void testIsNull() {
+        assertTrue(HgAssert.isNull(null));
+        assertFalse(HgAssert.isNull("abc", "cdf"));
+    }
+
+}
diff --git a/hugegraph-pd/hg-pd-test/src/main/java/org/apache/hugegraph/pd/common/KVPairTest.java b/hugegraph-pd/hg-pd-test/src/main/java/org/apache/hugegraph/pd/common/KVPairTest.java
new file mode 100644
index 000000000..9fb676d39
--- /dev/null
+++ b/hugegraph-pd/hg-pd-test/src/main/java/org/apache/hugegraph/pd/common/KVPairTest.java
@@ -0,0 +1,72 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.pd.common;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+public class KVPairTest {
+
+    KVPair<String, Integer> pair;
+
+    @Before
+    public void init() {
+        this.pair = new KVPair<>("key", 1);
+    }
+
+    @Test
+    public void testGetKey() {
+        assertEquals(this.pair.getKey(), "key");
+    }
+
+    @Test
+    public void testSetKey() {
+        this.pair.setKey("key2");
+        assertEquals(this.pair.getKey(), "key2");
+    }
+
+    @Test
+    public void testGetValue() {
+        assertEquals(1, this.pair.getValue());
+    }
+
+    @Test
+    public void testSetValue() {
+        this.pair.setValue(2);
+        assertEquals(2, this.pair.getValue());
+    }
+
+    @Test
+    public void testToString() {
+
+    }
+
+    @Test
+    public void testHashCode() {
+
+    }
+
+    @Test
+    public void testEquals() {
+        var pair2 = new KVPair<>("key", 1);
+        Assert.assertEquals(pair2, this.pair);
+    }
+}
diff --git a/hugegraph-pd/hg-pd-test/src/main/java/org/apache/hugegraph/pd/common/PartitionCacheTest.java b/hugegraph-pd/hg-pd-test/src/main/java/org/apache/hugegraph/pd/common/PartitionCacheTest.java
new file mode 100644
index 000000000..21e757ffa
--- /dev/null
+++ b/hugegraph-pd/hg-pd-test/src/main/java/org/apache/hugegraph/pd/common/PartitionCacheTest.java
@@ -0,0 +1,388 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.pd.common;
+
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+import java.io.UnsupportedEncodingException;
+import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.hugegraph.pd.grpc.Metapb;
+import org.junit.Before;
+import org.junit.Test;
+
+public class PartitionCacheTest {
+
+    private PartitionCache cache;
+
+    private static Metapb.Partition createPartition(int pid, String graphName, long start,
+                                                    long end) {
+        return Metapb.Partition.newBuilder()
+                               .setId(pid)
+                               .setGraphName(graphName)
+                               .setStartKey(start)
+                               .setEndKey(end)
+                               .setState(Metapb.PartitionState.PState_Normal)
+                               .setVersion(1)
+                               .build();
+    }
+
+    private static Metapb.ShardGroup creteShardGroup(int pid) {
+        return Metapb.ShardGroup.newBuilder()
+                                .addShards(
+                                        Metapb.Shard.newBuilder().setStoreId(0)
+                                                    .setRole(Metapb.ShardRole.Leader).build()
+                                )
+                                .setId(pid)
+                                .setVersion(0)
+                                .setConfVer(0)
+                                .setState(Metapb.PartitionState.PState_Normal)
+                                .build();
+    }
+
+    private static Metapb.Shard createShard() {
+        return Metapb.Shard.newBuilder()
+                           .setStoreId(0)
+                           .setRole(Metapb.ShardRole.Leader)
+                           .build();
+    }
+
+    private static Metapb.Store createStore(long storeId) {
+        return Metapb.Store.newBuilder()
+                           .setId(storeId)
+                           .setAddress("127.0.0.1")
+                           .setCores(4)
+                           .setVersion("1")
+                           .setDataPath("/tmp/junit")
+                           .setDataVersion(1)
+                           .setLastHeartbeat(System.currentTimeMillis())
+                           .setStartTimestamp(System.currentTimeMillis())
+                           .setState(Metapb.StoreState.Up)
+                           .setDeployPath("/tmp/junit")
+                           .build();
+    }
+
+    private static Metapb.Graph createGraph(String graphName, int partitionCount) {
+        return Metapb.Graph.newBuilder()
+                           .setGraphName(graphName)
+                           .setPartitionCount(partitionCount)
+                           .setState(Metapb.PartitionState.PState_Normal)
+                           .build();
+    }
+
+    private static Metapb.ShardGroup createShardGroup() {
+        List<Metapb.Shard> shards = new ArrayList<>();
+        for (int i = 0; i < 3; i++) {
+            shards.add(Metapb.Shard.newBuilder()
+                                   .setStoreId(i)
+                                   .setRole(i == 0 ? Metapb.ShardRole.Leader :
+                                            Metapb.ShardRole.Follower)
+                                   .build()
+            );
+        }
+
+        return Metapb.ShardGroup.newBuilder()
+                                .setId(1)
+                                .setVersion(1)
+                                .setConfVer(1)
+                                .setState(Metapb.PartitionState.PState_Normal)
+                                .addAllShards(shards)
+                                .build();
+    }
+
+    @Before
+    public void setup() {
+        this.cache = new PartitionCache();
+    }
+
+    @Test
+    public void testGetPartitionById() {
+        var partition = createPartition(0, "graph0", 0, 65535);
+        this.cache.updateShardGroup(creteShardGroup(0));
+        this.cache.updatePartition(partition);
+        var ret = this.cache.getPartitionById("graph0", 0);
+        assertNotNull(ret);
+        assertEquals(ret.getKey(), partition);
+    }
+
+    @Test
+    public void testGetPartitionByKey() throws UnsupportedEncodingException {
+        var partition = createPartition(0, "graph0", 0, 65535);
+        this.cache.updateShardGroup(creteShardGroup(0));
+        this.cache.updatePartition(partition);
+        var ret = this.cache.getPartitionByKey("graph0", "0".getBytes(StandardCharsets.UTF_8));
+        assertNotNull(ret);
+        assertEquals(ret.getKey(), partition);
+    }
+
+    @Test
+    public void getPartitionByCode() {
+        var partition = createPartition(0, "graph0", 0, 1024);
+        this.cache.updateShardGroup(creteShardGroup(0));
+        this.cache.updatePartition(partition);
+        var ret = this.cache.getPartitionByCode("graph0", 10);
+        assertNotNull(ret);
+        assertEquals(ret.getKey(), partition);
+        assertNull(this.cache.getPartitionByCode("graph0", 2000));
+    }
+
+    @Test
+    public void testGetPartitions() {
+        var partition1 = createPartition(0, "graph0", 0, 1024);
+        this.cache.updateShardGroup(creteShardGroup(0));
+        this.cache.updatePartition(partition1);
+        assertEquals(this.cache.getPartitions("graph0").size(), 1);
+        var partition2 = createPartition(1, "graph0", 1024, 2048);
+        this.cache.updateShardGroup(creteShardGroup(1));
+        this.cache.updatePartition(partition2);
+        assertEquals(this.cache.getPartitions("graph0").size(), 2);
+        System.out.println(this.cache.debugCacheByGraphName("graph0"));
+    }
+
+    @Test
+    public void testAddPartition() {
+        var partition = createPartition(0, "graph0", 0, 65535);
+        this.cache.addPartition("graph0", 0, partition);
+        var ret = this.cache.getPartitionById("graph0", 0);
+        assertNotNull(ret);
+        assertEquals(ret.getKey(), partition);
+        assertNotNull(this.cache.getPartitionByCode("graph0", 2000));
+        System.out.println(this.cache.debugCacheByGraphName("graph0"));
+        var partition2 = createPartition(0, "graph0", 0, 1024);
+        this.cache.addPartition("graph0", 0, partition2);
+        ret = this.cache.getPartitionById("graph0", 0);
+        assertNotNull(ret);
+        assertEquals(ret.getKey(), partition2);
+        assertNull(this.cache.getPartitionByCode("graph0", 2000));
+        System.out.println(this.cache.debugCacheByGraphName("graph0"));
+    }
+
+    @Test
+    public void testUpdatePartition() {
+        var partition = createPartition(0, "graph0", 0, 65535);
+        this.cache.updateShardGroup(creteShardGroup(0));
+        this.cache.addPartition("graph0", 0, partition);
+        var partition2 = createPartition(0, "graph0", 0, 1024);
+        this.cache.updatePartition("graph0", 0, partition2);
+        var ret = this.cache.getPartitionById("graph0", 0);
+        assertNotNull(ret);
+        assertEquals(ret.getKey(), partition2);
+        assertNull(this.cache.getPartitionByCode("graph0", 2000));
+    }
+
+    @Test
+    public void testUpdatePartition2() {
+        var partition = createPartition(0, "graph0", 0, 1024);
+        this.cache.updateShardGroup(creteShardGroup(0));
+        assertTrue(this.cache.updatePartition(partition));
+        assertFalse(this.cache.updatePartition(partition));
+        var ret = this.cache.getPartitionById("graph0", 0);
+        assertNotNull(ret);
+        assertEquals(ret.getKey(), partition);
+        assertNull(this.cache.getPartitionByCode("graph0", 2000));
+    }
+
+    @Test
+    public void testRemovePartition() {
+        var partition = createPartition(0, "graph0", 0, 1024);
+        this.cache.updateShardGroup(creteShardGroup(0));
+        this.cache.updatePartition(partition);
+        assertNotNull(this.cache.getPartitionById("graph0", 0));
+        this.cache.removePartition("graph0", 0);
+        assertNull(this.cache.getPartitionById("graph0", 0));
+        System.out.println(this.cache.debugCacheByGraphName("graph0"));
+    }
+
+    @Test
+    public void testRange() {
+        var partition1 = createPartition(1, "graph0", 0, 3);
+        var partition2 = createPartition(2, "graph0", 3, 6);
+        this.cache.updatePartition(partition1);
+        this.cache.updatePartition(partition2);
+
+        var partition3 = createPartition(3, "graph0", 1, 2);
+        var partition4 = createPartition(4, "graph0", 2, 3);
+        this.cache.updatePartition(partition3);
+        this.cache.updatePartition(partition4);
+        System.out.println(this.cache.debugCacheByGraphName("graph0"));
+
+        var partition6 = createPartition(1, "graph0", 0, 1);
+        this.cache.updatePartition(partition6);
+        System.out.println(this.cache.debugCacheByGraphName("graph0"));
+
+        var partition5 = createPartition(1, "graph0", 0, 3);
+        this.cache.updatePartition(partition5);
+        System.out.println(this.cache.debugCacheByGraphName("graph0"));
+    }
+
+    @Test
+    public void testRange2() {
+        var partition1 = createPartition(1, "graph0", 0, 3);
+        var partition2 = createPartition(2, "graph0", 3, 6);
+        this.cache.updatePartition(partition1);
+        this.cache.updatePartition(partition2);
+        System.out.println(this.cache.debugCacheByGraphName("graph0"));
+
+        // there is a gap in the middle of the range
+        var partition3 = createPartition(1, "graph0", 2, 3);
+        this.cache.updatePartition(partition3);
+        System.out.println(this.cache.debugCacheByGraphName("graph0"));
+
+        var partition5 = createPartition(1, "graph0", 0, 3);
+        this.cache.updatePartition(partition5);
+        System.out.println(this.cache.debugCacheByGraphName("graph0"));
+    }
+
+    @Test
+    public void testRemovePartitions() {
+        var partition1 = createPartition(0, "graph0", 0, 1024);
+        var partition2 = createPartition(1, "graph0", 1024, 2048);
+        this.cache.updateShardGroup(creteShardGroup(0));
+        this.cache.updatePartition(partition1);
+        this.cache.updateShardGroup(creteShardGroup(1));
+        this.cache.updatePartition(partition2);
+        assertEquals(this.cache.getPartitions("graph0").size(), 2);
+        this.cache.removePartitions();
+        assertEquals(this.cache.getPartitions("graph0").size(), 0);
+    }
+
+    @Test
+    public void testRemoveAll() {
+        var partition1 = createPartition(0, "graph0", 0, 1024);
+        var partition2 = createPartition(1, "graph0", 1024, 2048);
+        var partition3 = createPartition(0, "graph1", 0, 2048);
+        this.cache.updateShardGroup(creteShardGroup(0));
+        this.cache.updateShardGroup(creteShardGroup(1));
+        this.cache.updatePartition(partition1);
+        this.cache.updatePartition(partition2);
+        this.cache.updatePartition(partition3);
+
+        assertEquals(this.cache.getPartitions("graph0").size(), 2);
+        assertEquals(this.cache.getPartitions("graph1").size(), 1);
+        this.cache.removeAll("graph0");
+        assertEquals(this.cache.getPartitions("graph0").size(), 0);
+        assertEquals(this.cache.getPartitions("graph1").size(), 1);
+    }
+
+    @Test
+    public void testUpdateShardGroup() {
+        var shardGroup = createShardGroup();
+        this.cache.updateShardGroup(shardGroup);
+        assertNotNull(this.cache.getShardGroup(shardGroup.getId()));
+    }
+
+    @Test
+    public void testGetShardGroup() {
+        var shardGroup = createShardGroup();
+        this.cache.updateShardGroup(shardGroup);
+        assertEquals(this.cache.getShardGroup(shardGroup.getId()), shardGroup);
+    }
+
+    @Test
+    public void testAddStore() {
+        var store = createStore(1);
+        this.cache.addStore(1L, store);
+        assertEquals(this.cache.getStoreById(1L), store);
+    }
+
+    @Test
+    public void testGetStoreById() {
+        var store = createStore(1);
+        this.cache.addStore(1L, store);
+        assertEquals(this.cache.getStoreById(1L), store);
+    }
+
+    @Test
+    public void testRemoveStore() {
+        var store = createStore(1);
+        this.cache.addStore(1L, store);
+        assertEquals(this.cache.getStoreById(1L), store);
+
+        this.cache.removeStore(1L);
+        assertNull(this.cache.getStoreById(1L));
+    }
+
+    @Test
+    public void testHasGraph() {
+        var partition = createPartition(0, "graph0", 0, 65535);
+        this.cache.updateShardGroup(creteShardGroup(0));
+        this.cache.updatePartition(partition);
+        assertTrue(this.cache.hasGraph("graph0"));
+        assertFalse(this.cache.hasGraph("graph1"));
+    }
+
+    @Test
+    public void testUpdateGraph() {
+        var graph = createGraph("graph0", 10);
+        this.cache.updateGraph(graph);
+        assertEquals(this.cache.getGraph("graph0"), graph);
+        graph = createGraph("graph0", 12);
+        this.cache.updateGraph(graph);
+        assertEquals(this.cache.getGraph("graph0"), graph);
+    }
+
+    @Test
+    public void testGetGraph() {
+        var graph = createGraph("graph0", 12);
+        this.cache.updateGraph(graph);
+        assertEquals(this.cache.getGraph("graph0"), graph);
+    }
+
+    @Test
+    public void testGetGraphs() {
+        var graph1 = createGraph("graph0", 12);
+        var graph2 = createGraph("graph1", 12);
+        var graph3 = createGraph("graph2", 12);
+        this.cache.updateGraph(graph1);
+        this.cache.updateGraph(graph2);
+        this.cache.updateGraph(graph3);
+        assertEquals(this.cache.getGraphs().size(), 3);
+    }
+
+    @Test
+    public void testReset() {
+        var graph1 = createGraph("graph0", 12);
+        var graph2 = createGraph("graph1", 12);
+        var graph3 = createGraph("graph2", 12);
+        this.cache.updateGraph(graph1);
+        this.cache.updateGraph(graph2);
+        this.cache.updateGraph(graph3);
+        assertEquals(this.cache.getGraphs().size(), 3);
+        this.cache.reset();
+        assertEquals(this.cache.getGraphs().size(), 0);
+    }
+
+    @Test
+    public void testUpdateShardGroupLeader() {
+        var shardGroup = createShardGroup();
+        this.cache.updateShardGroup(shardGroup);
+
+        var leader =
+                Metapb.Shard.newBuilder().setStoreId(2).setRole(Metapb.ShardRole.Leader).build();
+        this.cache.updateShardGroupLeader(shardGroup.getId(), leader);
+
+        assertEquals(this.cache.getLeaderShard(shardGroup.getId()), leader);
+    }
+
+}
diff --git a/hugegraph-pd/hg-pd-test/src/main/java/org/apache/hugegraph/pd/common/PartitionUtilsTest.java b/hugegraph-pd/hg-pd-test/src/main/java/org/apache/hugegraph/pd/common/PartitionUtilsTest.java
new file mode 100644
index 000000000..e0742a483
--- /dev/null
+++ b/hugegraph-pd/hg-pd-test/src/main/java/org/apache/hugegraph/pd/common/PartitionUtilsTest.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hugegraph.pd.common;
+
+import java.nio.charset.StandardCharsets;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+import lombok.extern.slf4j.Slf4j;
+
+@Slf4j
+public class PartitionUtilsTest extends BaseCommonTest {
+
+    @Test
+    public void testCalcHashcode() {
+        byte[] key = new byte[5];
+        long code = PartitionUtils.calcHashcode(key);
+        Assert.assertEquals(code, 31912L);
+    }
+
+    // @Test
+    public void testHashCode() {
+        int partCount = 10;
+        int partSize = PartitionUtils.MAX_VALUE / partCount + 1;
+        int[] counter = new int[partCount];
+        for (int i = 0; i < 10000; i++) {
+            String s = String.format("BATCH-GET-UNIT-%02d", i);
+            int c = PartitionUtils.calcHashcode(s.getBytes(StandardCharsets.UTF_8));
+
+            counter[c / partSize]++;
+
+        }
+
+        for (int i = 0; i < counter.length; i++) {
+            System.out.println(i + " " + counter[i]);
+        }
+    }
+}
diff --git a/hugegraph-pd/hg-pd-test/src/main/resources/log4j2.xml b/hugegraph-pd/hg-pd-test/src/main/resources/log4j2.xml
new file mode 100644
index 000000000..e462bf16e
--- /dev/null
+++ b/hugegraph-pd/hg-pd-test/src/main/resources/log4j2.xml
@@ -0,0 +1,139 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+  -->
+
+<!-- Config will be autoloaded every 60s -->
+<configuration status="error" monitorInterval="60">
+    <properties>
+        <property name="LOG_PATH">logs</property>
+        <property name="FILE_NAME">hg-pd-test</property>
+    </properties>
+
+    <appenders>
+        <Console name="console" target="SYSTEM_OUT">
+            <ThresholdFilter level="DEBUG" onMatch="ACCEPT" onMismatch="DENY" 
/>
+            <PatternLayout pattern="%-d{yyyy-MM-dd HH:mm:ss} [%t] [%p] %c{1.} 
- %m%n" />
+        </Console>
+
+        <!-- Normal server log config -->
+        <RollingRandomAccessFile name="file" 
fileName="${LOG_PATH}/${FILE_NAME}.log"
+                                 
filePattern="${LOG_PATH}/$${date:yyyy-MM}/${FILE_NAME}-%d{yyyy-MM-dd}-%i.log"
+                                 bufferedIO="true" bufferSize="524288" 
immediateFlush="false">
+            <ThresholdFilter level="TRACE" onMatch="ACCEPT" onMismatch="DENY" 
/>
+            <PatternLayout pattern="%-d{yyyy-MM-dd HH:mm:ss} [%t] [%p] %c{1.} 
- %m%n" />
+            <!--JsonLayout compact="true" eventEol="true" complete="true" locationInfo="true">
+                <KeyValuePair key="timestamp" value="$${date:yyyy-MM-dd HH:mm:ss.SSS}"/>
+            </JsonLayout-->
+            <!-- Trigger after exceeding 1day or 50MB -->
+            <Policies>
+                <SizeBasedTriggeringPolicy size="128MB" />
+                <TimeBasedTriggeringPolicy interval="1" modulate="true" />
+            </Policies>
+            <!-- Keep 5 files per day & auto delete after over 2GB or 100 files -->
+            <DefaultRolloverStrategy max="16">
+                <Delete basePath="${LOG_PATH}" maxDepth="2">
+                    <IfFileName glob="*/*.log" />
+                    <!-- Limit log amount & size -->
+                    <IfAny>
+                        <IfAccumulatedFileSize exceeds="2GB" />
+                        <IfAccumulatedFileCount exceeds="100" />
+                    </IfAny>
+                </Delete>
+            </DefaultRolloverStrategy>
+        </RollingRandomAccessFile>
+
+        <!-- jraft server log config -->
+        <RollingRandomAccessFile name="raft_file" 
fileName="${LOG_PATH}/${FILE_NAME}_raft.log"
+                                 
filePattern="${LOG_PATH}/$${date:yyyy-MM}/${FILE_NAME}-%d{yyyy-MM-dd}-%i.log"
+                                 bufferedIO="true" bufferSize="524288" 
immediateFlush="false">
+            <ThresholdFilter level="TRACE" onMatch="ACCEPT" onMismatch="DENY" 
/>
+            <PatternLayout pattern="%-d{yyyy-MM-dd HH:mm:ss} [%t] [%p] %c{1.} 
- %m%n" />
+            <!--JsonLayout compact="true" eventEol="true" complete="true" 
locationInfo="true">
+                <KeyValuePair key="timestamp" value="$${date:yyyy-MM-dd 
HH:mm:ss.SSS}"/>
+            </JsonLayout-->
+            <!-- Trigger after exceeding 1day or 50MB -->
+            <Policies>
+                <SizeBasedTriggeringPolicy size="128MB" />
+                <TimeBasedTriggeringPolicy interval="1" modulate="true" />
+            </Policies>
+            <!-- Keep 5 files per day & auto Delete after over 2GB or 100 files -->
+            <DefaultRolloverStrategy max="16">
+                <Delete basePath="${LOG_PATH}" maxDepth="2">
+                    <IfFileName glob="*/*.log" />
+                    <!-- Limit log amount & size -->
+                    <IfAny>
+                        <IfAccumulatedFileSize exceeds="2GB" />
+                        <IfAccumulatedFileCount exceeds="100" />
+                    </IfAny>
+                </Delete>
+            </DefaultRolloverStrategy>
+        </RollingRandomAccessFile>
+
+        <!-- Separate & compress audit log, buffer size is 512KB -->
+        <RollingRandomAccessFile name="audit" 
fileName="${LOG_PATH}/audit-${FILE_NAME}.log"
+                                 
filePattern="${LOG_PATH}/$${date:yyyy-MM}/audit-${FILE_NAME}-%d{yyyy-MM-dd-HH}-%i.gz"
+                                 bufferedIO="true" bufferSize="524288" 
immediateFlush="false">
+            <ThresholdFilter level="TRACE" onMatch="ACCEPT" onMismatch="DENY" 
/>
+            <!-- Use simple format for audit log to speed up -->
+            <!-- PatternLayout pattern="%-d{yyyy-MM-dd HH:mm:ss} - %m%n"/ -->
+            <JsonLayout compact="true" eventEol="true" locationInfo="true">
+                <KeyValuePair key="timestamp" value="$${date:yyyy-MM-dd 
HH:mm:ss.SSS}" />
+            </JsonLayout>
+            <!-- Trigger after exceeding 1hour or 500MB -->
+            <Policies>
+                <SizeBasedTriggeringPolicy size="512MB" />
+                <TimeBasedTriggeringPolicy interval="1" modulate="true" />
+            </Policies>
+            <!-- Keep 2 files per hour & auto delete [after 60 days] or [over 5GB or 500 files] -->
+            <DefaultRolloverStrategy max="16">
+                <Delete basePath="${LOG_PATH}" maxDepth="2">
+                    <IfFileName glob="*/*.gz" />
+                    <IfLastModified age="60d" />
+                    <IfAny>
+                        <IfAccumulatedFileSize exceeds="5GB" />
+                        <IfAccumulatedFileCount exceeds="500" />
+                    </IfAny>
+                </Delete>
+            </DefaultRolloverStrategy>
+        </RollingRandomAccessFile>
+    </appenders>
+
+    <loggers>
+        <root level="INFO">
+            <appender-ref ref="file" />
+            <appender-ref ref="console" />
+        </root>
+        <logger name="com.alipay.sofa" level="INFO" additivity="false">
+            <appender-ref ref="raft_file" />
+            <appender-ref ref="console" />
+        </logger>
+        <logger name="io.netty" level="INFO" additivity="false">
+            <appender-ref ref="file" />
+            <appender-ref ref="console" />
+        </logger>
+        <logger name="org.apache.commons" level="INFO" additivity="false">
+            <appender-ref ref="file" />
+            <appender-ref ref="console" />
+        </logger>
+        <!-- Use mixed async way to output logs -->
+        <AsyncLogger name="org.apache.hugegraph" level="INFO" additivity="false">
+            <appender-ref ref="file" />
+            <appender-ref ref="console" />
+        </AsyncLogger>
+    </loggers>
+</configuration>

