This is an automated email from the ASF dual-hosted git repository.

jin pushed a commit to branch master
in repository 
https://gitbox.apache.org/repos/asf/incubator-hugegraph-toolchain.git


The following commit(s) were added to refs/heads/master by this push:
     new 18ae76b3 fix: checkstyle && add suppressions.xml (#500)
18ae76b3 is described below

commit 18ae76b3ef43fae319b86bb27c89fabe88a8c9c4
Author: Liu Xiao <[email protected]>
AuthorDate: Mon Sep 4 17:49:18 2023 +0800

    fix: checkstyle && add suppressions.xml (#500)
    
    Continuation of PR #385
    
    ---------
    
    Co-authored-by: imbajin <[email protected]>
---
 .../api/traverser/JaccardSimilarityAPI.java        |  1 -
 .../exception/InvalidOperationException.java       |  1 -
 .../hugegraph/exception/ServerException.java       |  5 +++
 .../serializer/direct/HBaseSerializer.java         | 23 +++++++------
 .../serializer/direct/reuse/BytesDemo.java         | 16 ++++-----
 .../serializer/direct/util/IdGenerator.java        | 16 ++++-----
 .../direct/util/SplicingIdGenerator.java           | 20 ++++++------
 .../org/apache/hugegraph/config/HubbleConfig.java  |  5 +++
 .../controller/load/LoadTaskController.java        |  2 +-
 .../org/apache/hugegraph/license/ServerInfo.java   |  2 --
 .../hugegraph/service/load/LoadTaskService.java    | 14 ++++----
 .../service/query/ExecuteHistoryService.java       |  8 ++---
 .../org/apache/hugegraph/util/GremlinUtil.java     |  8 ++---
 .../org/apache/hugegraph/util/SerializeUtil.java   |  1 -
 .../loader/flink/HugeGraphOutputFormat.java        | 10 +++---
 .../loader/reader/file/LocalFileReader.java        |  1 -
 .../loader/spark/HugeGraphSparkLoader.java         | 18 +++++-----
 .../apache/hugegraph/loader/task/InsertTask.java   | 20 ++++++------
 .../java/org/apache/hugegraph/base/Printer.java    |  2 +-
 .../java/org/apache/hugegraph/base/ToolClient.java |  3 +-
 .../org/apache/hugegraph/cmd/HugeGraphCommand.java |  4 +--
 .../manager/AuthBackupRestoreManager.java          | 38 +++++++++++-----------
 pom.xml                                            |  3 +-
 checkstyle.xml => tools/checkstyle.xml             |  3 +-
 tools/suppressions.xml                             | 26 +++++++++++++++
 25 files changed, 139 insertions(+), 111 deletions(-)

diff --git 
a/hugegraph-client/src/main/java/org/apache/hugegraph/api/traverser/JaccardSimilarityAPI.java
 
b/hugegraph-client/src/main/java/org/apache/hugegraph/api/traverser/JaccardSimilarityAPI.java
index 41a827ae..80fe8b48 100644
--- 
a/hugegraph-client/src/main/java/org/apache/hugegraph/api/traverser/JaccardSimilarityAPI.java
+++ 
b/hugegraph-client/src/main/java/org/apache/hugegraph/api/traverser/JaccardSimilarityAPI.java
@@ -28,7 +28,6 @@ import org.apache.hugegraph.rest.RestResult;
 import org.apache.hugegraph.structure.constant.Direction;
 import 
org.apache.hugegraph.structure.traverser.SingleSourceJaccardSimilarityRequest;
 
-
 import org.apache.hugegraph.util.E;
 
 public class JaccardSimilarityAPI extends TraversersAPI {
diff --git 
a/hugegraph-client/src/main/java/org/apache/hugegraph/exception/InvalidOperationException.java
 
b/hugegraph-client/src/main/java/org/apache/hugegraph/exception/InvalidOperationException.java
index f70cbf97..bd5ce915 100644
--- 
a/hugegraph-client/src/main/java/org/apache/hugegraph/exception/InvalidOperationException.java
+++ 
b/hugegraph-client/src/main/java/org/apache/hugegraph/exception/InvalidOperationException.java
@@ -17,7 +17,6 @@
 
 package org.apache.hugegraph.exception;
 
-
 import org.apache.hugegraph.rest.ClientException;
 
 public class InvalidOperationException extends ClientException {
diff --git 
a/hugegraph-client/src/main/java/org/apache/hugegraph/exception/ServerException.java
 
b/hugegraph-client/src/main/java/org/apache/hugegraph/exception/ServerException.java
index 1e0ac578..e718181d 100644
--- 
a/hugegraph-client/src/main/java/org/apache/hugegraph/exception/ServerException.java
+++ 
b/hugegraph-client/src/main/java/org/apache/hugegraph/exception/ServerException.java
@@ -20,11 +20,15 @@ package org.apache.hugegraph.exception;
 import java.util.Map;
 
 import org.apache.hugegraph.rest.RestResult;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import jakarta.ws.rs.core.Response;
 
 public class ServerException extends RuntimeException {
 
+    private static final Logger LOG = 
LoggerFactory.getLogger(ServerException.class);
+
     private static final long serialVersionUID = 6335623004322652358L;
 
     private static final String[] EXCEPTION_KEYS = {"exception",
@@ -52,6 +56,7 @@ public class ServerException extends RuntimeException {
             exception.cause = (String) getByKeys(json, CAUSE_KEYS);
             exception.trace = getByKeys(json, TRACE_KEYS);
         } catch (Exception ignored) {
+            LOG.error("ServerException fromResponse excepiton");
         }
 
         return exception;
diff --git 
a/hugegraph-client/src/main/java/org/apache/hugegraph/serializer/direct/HBaseSerializer.java
 
b/hugegraph-client/src/main/java/org/apache/hugegraph/serializer/direct/HBaseSerializer.java
index 9197ecc7..18cb87af 100644
--- 
a/hugegraph-client/src/main/java/org/apache/hugegraph/serializer/direct/HBaseSerializer.java
+++ 
b/hugegraph-client/src/main/java/org/apache/hugegraph/serializer/direct/HBaseSerializer.java
@@ -40,8 +40,7 @@ public class HBaseSerializer {
     private HugeClient client;
     private GraphSchema graphSchema;
 
-
-    public HBaseSerializer(HugeClient client, int vertexPartitions, int 
edgePartitions){
+    public HBaseSerializer(HugeClient client, int vertexPartitions, int 
edgePartitions) {
         this.client = client;
         this.graphSchema = new GraphSchema(client);
         this.edgeLogicPartitions = edgePartitions;
@@ -50,12 +49,12 @@ public class HBaseSerializer {
 
     public byte[] getKeyBytes(GraphElement e) {
         byte[] array = null;
-        if(e.type() == "vertex" && e.id() != null){
+        if (e.type() == "vertex" && e.id() != null) {
             BytesBuffer buffer = BytesBuffer.allocate(2 + 1 + 
e.id().toString().length());
             buffer.writeShort(getPartition(HugeType.VERTEX,  
IdGenerator.of(e.id())));
             buffer.writeId(IdGenerator.of(e.id()));
             array = buffer.bytes();
-        }else if ( e.type() == "edge" ){
+        } else if (e.type() == "edge") {
             BytesBuffer buffer = BytesBuffer.allocate(BytesBuffer.BUF_EDGE_ID);
             Edge edge = (Edge)e;
             buffer.writeShort(getPartition(HugeType.EDGE, 
IdGenerator.of(edge.sourceId())));
@@ -71,22 +70,22 @@ public class HBaseSerializer {
 
     public byte[] getValueBytes(GraphElement e) {
         byte[] array = null;
-        if(e.type() == "vertex"){
-            int propsCount = e.properties().size() ; 
//vertex.sizeOfProperties();
+        if (e.type() == "vertex") {
+            int propsCount = e.properties().size(); 
//vertex.sizeOfProperties();
             BytesBuffer buffer = BytesBuffer.allocate(8 + 16 * propsCount);
             
buffer.writeId(IdGenerator.of(graphSchema.getVertexLabel(e.label()).id()));
             buffer.writeVInt(propsCount);
-            for(Map.Entry<String, Object> entry : e.properties().entrySet()){
+            for (Map.Entry<String, Object> entry : e.properties().entrySet()) {
                 PropertyKey propertyKey = 
graphSchema.getPropertyKey(entry.getKey());
                 buffer.writeVInt(propertyKey.id().intValue());
                 buffer.writeProperty(propertyKey.dataType(),entry.getValue());
             }
             array = buffer.bytes();
-        } else if ( e.type() == "edge" ){
+        } else if (e.type() == "edge") {
             int propsCount =  e.properties().size();
             BytesBuffer buffer = BytesBuffer.allocate(4 + 16 * propsCount);
             buffer.writeVInt(propsCount);
-            for(Map.Entry<String, Object> entry : e.properties().entrySet()){
+            for (Map.Entry<String, Object> entry : e.properties().entrySet()) {
                 PropertyKey propertyKey = 
graphSchema.getPropertyKey(entry.getKey());
                 buffer.writeVInt(propertyKey.id().intValue());
                 buffer.writeProperty(propertyKey.dataType(),entry.getValue());
@@ -108,15 +107,15 @@ public class HBaseSerializer {
         return partition > 0 ? partition : (short) -partition;
     }
 
-    public int getEdgeLogicPartitions(){
+    public int getEdgeLogicPartitions() {
         return this.edgeLogicPartitions;
     }
 
-    public int getVertexLogicPartitions(){
+    public int getVertexLogicPartitions() {
         return this.vertexLogicPartitions;
     }
 
-    public void close(){
+    public void close() {
         this.client.close();
     }
 }
diff --git 
a/hugegraph-client/src/main/java/org/apache/hugegraph/serializer/direct/reuse/BytesDemo.java
 
b/hugegraph-client/src/main/java/org/apache/hugegraph/serializer/direct/reuse/BytesDemo.java
index ba3c1e48..ea7bbbd9 100644
--- 
a/hugegraph-client/src/main/java/org/apache/hugegraph/serializer/direct/reuse/BytesDemo.java
+++ 
b/hugegraph-client/src/main/java/org/apache/hugegraph/serializer/direct/reuse/BytesDemo.java
@@ -39,7 +39,7 @@ public class BytesDemo {
     static HugeClient client;
     boolean bypassServer = true;
     RocksDBSerializer ser;
-    HBaseSerializer HBaseSer;
+    HBaseSerializer hBaseSer;
 
     public static void main(String[] args) {
         BytesDemo ins = new BytesDemo();
@@ -54,7 +54,6 @@ public class BytesDemo {
 
         SchemaManager schema = client.schema();
 
-
         schema.propertyKey("name").asText().ifNotExist().create();
         schema.propertyKey("age").asInt().ifNotExist().create();
         schema.propertyKey("lang").asText().ifNotExist().create();
@@ -97,7 +96,7 @@ public class BytesDemo {
               .ifNotExist()
               .create();
 
-        HBaseSer = new HBaseSerializer(client, vertexLogicPartitions, 
edgeLogicPartitions);
+        hBaseSer = new HBaseSerializer(client, vertexLogicPartitions, 
edgeLogicPartitions);
         writeGraphElements();
 
         client.close();
@@ -130,7 +129,6 @@ public class BytesDemo {
             add(vadasB);
         }};
 
-
         List<Edge> edges = new ArrayList<Edge>() {{
             add(peterCreateLop);
         }};
@@ -148,14 +146,14 @@ public class BytesDemo {
      * */
     void writeDirectly(List<Vertex> vertices, List<Edge> edges) {
         for (Vertex vertex : vertices) {
-            byte[] rowkey = HBaseSer.getKeyBytes(vertex);
-            byte[] values = HBaseSer.getValueBytes(vertex);
+            byte[] rowkey = hBaseSer.getKeyBytes(vertex);
+            byte[] values = hBaseSer.getValueBytes(vertex);
             sendRpcToHBase("vertex", rowkey, values);
         }
 
         for (Edge edge : edges) {
-            byte[] rowkey = HBaseSer.getKeyBytes(edge);
-            byte[] values = HBaseSer.getValueBytes(edge);
+            byte[] rowkey = hBaseSer.getKeyBytes(edge);
+            byte[] values = hBaseSer.getValueBytes(edge);
             sendRpcToHBase("edge", rowkey, values);
         }
     }
@@ -185,10 +183,8 @@ public class BytesDemo {
         return flag;
     }
 
-
     boolean put(String type, byte[] rowkey, byte[] values) throws IOException {
         // TODO: put to HBase
         return true;
     }
-
 }
diff --git 
a/hugegraph-client/src/main/java/org/apache/hugegraph/serializer/direct/util/IdGenerator.java
 
b/hugegraph-client/src/main/java/org/apache/hugegraph/serializer/direct/util/IdGenerator.java
index c6c51e37..c6f79cb9 100644
--- 
a/hugegraph-client/src/main/java/org/apache/hugegraph/serializer/direct/util/IdGenerator.java
+++ 
b/hugegraph-client/src/main/java/org/apache/hugegraph/serializer/direct/util/IdGenerator.java
@@ -28,19 +28,19 @@ public abstract class IdGenerator {
 
     public static final Id ZERO = IdGenerator.of(0L);
 
-    public final static Id of(String id) {
+    public static Id of(String id) {
         return new StringId(id);
     }
 
-    public final static Id of(UUID id) {
+    public static Id of(UUID id) {
         return new UuidId(id);
     }
 
-    public final static Id of(String id, boolean uuid) {
+    public static Id of(String id, boolean uuid) {
         return uuid ? new UuidId(id) : new StringId(id);
     }
 
-    public final static Id of(long id) {
+    public static Id of(long id) {
         return new LongId(id);
     }
 
@@ -57,7 +57,7 @@ public abstract class IdGenerator {
         return new ObjectId(id);
     }
 
-    public final static Id of(byte[] bytes, Id.IdType type) {
+    public static Id of(byte[] bytes, Id.IdType type) {
         switch (type) {
             case LONG:
                 return new LongId(bytes);
@@ -70,7 +70,7 @@ public abstract class IdGenerator {
         }
     }
 
-    public final static Id ofStoredString(String id, Id.IdType type) {
+    public static Id ofStoredString(String id, Id.IdType type) {
         switch (type) {
             case LONG:
                 return of(LongEncoding.decodeSignedB64(id));
@@ -84,7 +84,7 @@ public abstract class IdGenerator {
         }
     }
 
-    public final static String asStoredString(Id id) {
+    public static String asStoredString(Id id) {
         switch (id.type()) {
             case LONG:
                 return LongEncoding.encodeSignedB64(id.asLong());
@@ -97,7 +97,7 @@ public abstract class IdGenerator {
         }
     }
 
-    public final static Id.IdType idType(Id id) {
+    public static Id.IdType idType(Id id) {
         if (id instanceof LongId) {
             return Id.IdType.LONG;
         }
diff --git 
a/hugegraph-client/src/main/java/org/apache/hugegraph/serializer/direct/util/SplicingIdGenerator.java
 
b/hugegraph-client/src/main/java/org/apache/hugegraph/serializer/direct/util/SplicingIdGenerator.java
index ca2fdcdf..e7951540 100644
--- 
a/hugegraph-client/src/main/java/org/apache/hugegraph/serializer/direct/util/SplicingIdGenerator.java
+++ 
b/hugegraph-client/src/main/java/org/apache/hugegraph/serializer/direct/util/SplicingIdGenerator.java
@@ -55,16 +55,16 @@ public class SplicingIdGenerator {
     /**
      * Generate a string id of HugeVertex from Vertex name
      */
-//    public Id generate(HugeVertex vertex) {
-//        /*
-//         * Hash for row-key which will be evenly distributed.
-//         * We can also use LongEncoding.encode() to encode the int/long hash
-//         * if needed.
-//         * id = String.format("%s%s%s", HashUtil.hash(id), ID_SPLITOR, id);
-//         */
-//        // TODO: use binary Id with binary fields instead of string id
-//        return splicing(vertex.schemaLabel().id().asString(), vertex.name());
-//    }
+    //    public Id generate(HugeVertex vertex) {
+    //        /*
+    //         * Hash for row-key which will be evenly distributed.
+    //         * We can also use LongEncoding.encode() to encode the int/long 
hash
+    //         * if needed.
+    //         * id = String.format("%s%s%s", HashUtil.hash(id), ID_SPLITOR, 
id);
+    //         */
+    //        // TODO: use binary Id with binary fields instead of string id
+    //        return splicing(vertex.schemaLabel().id().asString(), 
vertex.name());
+    //    }
 
     /**
      * Concat multiple ids into one composite id with IDS_SPLITOR
diff --git 
a/hugegraph-hubble/hubble-be/src/main/java/org/apache/hugegraph/config/HubbleConfig.java
 
b/hugegraph-hubble/hubble-be/src/main/java/org/apache/hugegraph/config/HubbleConfig.java
index af2a1962..b5ff22e7 100644
--- 
a/hugegraph-hubble/hubble-be/src/main/java/org/apache/hugegraph/config/HubbleConfig.java
+++ 
b/hugegraph-hubble/hubble-be/src/main/java/org/apache/hugegraph/config/HubbleConfig.java
@@ -22,6 +22,8 @@ import java.io.File;
 
 import org.apache.hugegraph.exception.ExternalException;
 import org.apache.hugegraph.options.HubbleOptions;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.boot.ApplicationArguments;
 import org.springframework.context.annotation.Bean;
@@ -32,6 +34,8 @@ import org.apache.hugegraph.common.Constant;
 @Configuration
 public class HubbleConfig {
 
+    private static final Logger LOG = 
LoggerFactory.getLogger(HubbleConfig.class);
+
     @Autowired
     private ApplicationArguments arguments;
 
@@ -56,6 +60,7 @@ public class HubbleConfig {
                 conf = path;
             }
         } catch (Exception ignored) {
+            LOG.error("hugeConfig exception");
         }
         return new HugeConfig(conf);
     }
diff --git 
a/hugegraph-hubble/hubble-be/src/main/java/org/apache/hugegraph/controller/load/LoadTaskController.java
 
b/hugegraph-hubble/hubble-be/src/main/java/org/apache/hugegraph/controller/load/LoadTaskController.java
index 6e4a4f59..52a1137c 100644
--- 
a/hugegraph-hubble/hubble-be/src/main/java/org/apache/hugegraph/controller/load/LoadTaskController.java
+++ 
b/hugegraph-hubble/hubble-be/src/main/java/org/apache/hugegraph/controller/load/LoadTaskController.java
@@ -244,7 +244,7 @@ public class LoadTaskController extends BaseController {
             return this.service.retry(taskId);
         } finally {
             jobEntity.setJobStatus(JobStatus.LOADING);
-            jobEntity.setUpdateTime( HubbleUtil.nowDate());
+            jobEntity.setUpdateTime(HubbleUtil.nowDate());
             this.jobService.update(jobEntity);
         }
     }
diff --git 
a/hugegraph-hubble/hubble-be/src/main/java/org/apache/hugegraph/license/ServerInfo.java
 
b/hugegraph-hubble/hubble-be/src/main/java/org/apache/hugegraph/license/ServerInfo.java
index d441eeb1..f5779094 100644
--- 
a/hugegraph-hubble/hubble-be/src/main/java/org/apache/hugegraph/license/ServerInfo.java
+++ 
b/hugegraph-hubble/hubble-be/src/main/java/org/apache/hugegraph/license/ServerInfo.java
@@ -18,8 +18,6 @@
 
 package org.apache.hugegraph.license;
 
-import org.apache.hugegraph.license.MachineInfo;
-
 public final class ServerInfo {
 
     private final String serverId;
diff --git 
a/hugegraph-hubble/hubble-be/src/main/java/org/apache/hugegraph/service/load/LoadTaskService.java
 
b/hugegraph-hubble/hubble-be/src/main/java/org/apache/hugegraph/service/load/LoadTaskService.java
index 896b6550..3d374533 100644
--- 
a/hugegraph-hubble/hubble-be/src/main/java/org/apache/hugegraph/service/load/LoadTaskService.java
+++ 
b/hugegraph-hubble/hubble-be/src/main/java/org/apache/hugegraph/service/load/LoadTaskService.java
@@ -64,6 +64,7 @@ import org.apache.hugegraph.loader.util.MappingUtil;
 import org.apache.hugegraph.mapper.load.LoadTaskMapper;
 import org.apache.hugegraph.service.SettingSSLService;
 import org.apache.hugegraph.util.Ex;
+
 import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
 import com.baomidou.mybatisplus.core.metadata.IPage;
 import com.baomidou.mybatisplus.core.toolkit.Wrappers;
@@ -421,8 +422,8 @@ public class LoadTaskService {
     }
 
     private List<org.apache.hugegraph.loader.mapping.VertexMapping>
-            buildVertexMappings(GraphConnection connection,
-                                FileMapping fileMapping) {
+        buildVertexMappings(GraphConnection connection,
+                        FileMapping fileMapping) {
         int connId = connection.getId();
         List<org.apache.hugegraph.loader.mapping.VertexMapping> vMappings =
                 new ArrayList<>();
@@ -435,7 +436,8 @@ public class LoadTaskService {
                 Ex.check(idFields.size() == 1,
                          "When the ID strategy is CUSTOMIZED, you must " +
                          "select a column in the file as the id");
-                vMapping = new 
org.apache.hugegraph.loader.mapping.VertexMapping(idFields.get(0), true);
+                vMapping = new 
org.apache.hugegraph.loader.mapping.VertexMapping(idFields.get(0),
+                                                                               
  true);
             } else {
                 assert vl.getIdStrategy().isPrimaryKey();
                 List<String> primaryKeys = vl.getPrimaryKeys();
@@ -475,8 +477,8 @@ public class LoadTaskService {
     }
 
     private List<org.apache.hugegraph.loader.mapping.EdgeMapping>
-            buildEdgeMappings(GraphConnection connection,
-                              FileMapping fileMapping) {
+        buildEdgeMappings(GraphConnection connection,
+                      FileMapping fileMapping) {
         int connId = connection.getId();
         List<org.apache.hugegraph.loader.mapping.EdgeMapping> eMappings =
                 new ArrayList<>();
@@ -526,7 +528,7 @@ public class LoadTaskService {
 
             org.apache.hugegraph.loader.mapping.EdgeMapping eMapping;
             eMapping = new org.apache.hugegraph.loader.mapping.EdgeMapping(
-                       sourceFields, unfoldSource, targetFields, unfoldTarget);
+                    sourceFields, unfoldSource, targetFields, unfoldTarget);
             // set label
             eMapping.label(mapping.getLabel());
             // set field_mapping
diff --git 
a/hugegraph-hubble/hubble-be/src/main/java/org/apache/hugegraph/service/query/ExecuteHistoryService.java
 
b/hugegraph-hubble/hubble-be/src/main/java/org/apache/hugegraph/service/query/ExecuteHistoryService.java
index 91eea19c..e51ba6f4 100644
--- 
a/hugegraph-hubble/hubble-be/src/main/java/org/apache/hugegraph/service/query/ExecuteHistoryService.java
+++ 
b/hugegraph-hubble/hubble-be/src/main/java/org/apache/hugegraph/service/query/ExecuteHistoryService.java
@@ -39,6 +39,7 @@ import org.apache.hugegraph.options.HubbleOptions;
 import org.apache.hugegraph.service.HugeClientPoolService;
 import org.apache.hugegraph.structure.Task;
 import org.apache.hugegraph.util.HubbleUtil;
+
 import com.baomidou.mybatisplus.core.conditions.query.QueryWrapper;
 import com.baomidou.mybatisplus.core.metadata.IPage;
 import com.baomidou.mybatisplus.core.toolkit.Wrappers;
@@ -92,7 +93,6 @@ public class ExecuteHistoryService {
         return results;
     }
 
-
     public ExecuteHistory get(int connId, int id) {
         HugeClient client = this.getClient(connId);
         ExecuteHistory history = this.mapper.selectById(id);
@@ -111,9 +111,9 @@ public class ExecuteHistoryService {
 
     @Transactional(isolation = Isolation.READ_COMMITTED)
     public void save(ExecuteHistory history) {
-         if (this.mapper.insert(history) != 1) {
-             throw new InternalException("entity.insert.failed", history);
-         }
+        if (this.mapper.insert(history) != 1) {
+            throw new InternalException("entity.insert.failed", history);
+        }
     }
 
     @Transactional(isolation = Isolation.READ_COMMITTED)
diff --git 
a/hugegraph-hubble/hubble-be/src/main/java/org/apache/hugegraph/util/GremlinUtil.java
 
b/hugegraph-hubble/hubble-be/src/main/java/org/apache/hugegraph/util/GremlinUtil.java
index 352e1fc0..acd31181 100644
--- 
a/hugegraph-hubble/hubble-be/src/main/java/org/apache/hugegraph/util/GremlinUtil.java
+++ 
b/hugegraph-hubble/hubble-be/src/main/java/org/apache/hugegraph/util/GremlinUtil.java
@@ -44,17 +44,17 @@ public final class GremlinUtil {
     );
 
     private static final String[] COMPILE_SEARCH_LIST = new String[]{
-            ".", "(", ")"
+        ".", "(", ")"
     };
     private static final String[] COMPILE_TARGET_LIST = new String[]{
-            "\\.", "\\(", "\\)"
+        "\\.", "\\(", "\\)"
     };
 
     private static final String[] ESCAPE_SEARCH_LIST = new String[]{
-            "\\", "\"", "'", "\n"
+        "\\", "\"", "'", "\n"
     };
     private static final String[] ESCAPE_TARGET_LIST = new String[]{
-            "\\\\", "\\\"", "\\'", "\\n"
+        "\\\\", "\\\"", "\\'", "\\n"
     };
 
     private static final Set<Pattern> LIMIT_PATTERNS = compile(LIMIT_SUFFIXES);
diff --git 
a/hugegraph-hubble/hubble-be/src/main/java/org/apache/hugegraph/util/SerializeUtil.java
 
b/hugegraph-hubble/hubble-be/src/main/java/org/apache/hugegraph/util/SerializeUtil.java
index d92153b2..d427598b 100644
--- 
a/hugegraph-hubble/hubble-be/src/main/java/org/apache/hugegraph/util/SerializeUtil.java
+++ 
b/hugegraph-hubble/hubble-be/src/main/java/org/apache/hugegraph/util/SerializeUtil.java
@@ -25,7 +25,6 @@ import org.apache.commons.io.FileUtils;
 import com.fasterxml.jackson.core.JsonGenerator;
 import com.fasterxml.jackson.databind.JsonSerializer;
 import com.fasterxml.jackson.databind.SerializerProvider;
-import org.apache.hugegraph.util.TimeUtil;
 
 public final class SerializeUtil {
 
diff --git 
a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/flink/HugeGraphOutputFormat.java
 
b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/flink/HugeGraphOutputFormat.java
index 7bfefa16..1f9754d0 100644
--- 
a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/flink/HugeGraphOutputFormat.java
+++ 
b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/flink/HugeGraphOutputFormat.java
@@ -183,15 +183,15 @@ public class HugeGraphOutputFormat<T> extends 
RichOutputFormat<T> {
                     if (isVertex) {
                         BatchVertexRequest.Builder req = new 
BatchVertexRequest.Builder();
                         req.vertices((List<Vertex>) (Object) graphElements)
-                           .updatingStrategies(updateStrategyMap)
-                           .createIfNotExist(true);
+                            .updatingStrategies(updateStrategyMap)
+                            .createIfNotExist(true);
                         g.updateVertices(req.build());
                     } else {
                         BatchEdgeRequest.Builder req = new 
BatchEdgeRequest.Builder();
                         req.edges((List<Edge>) (Object) graphElements)
-                           .updatingStrategies(updateStrategyMap)
-                           .checkVertex(this.loadOptions.checkVertex)
-                           .createIfNotExist(true);
+                            .updatingStrategies(updateStrategyMap)
+                            .checkVertex(this.loadOptions.checkVertex)
+                            .createIfNotExist(true);
                         g.updateEdges(req.build());
                     }
                     break;
diff --git 
a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/reader/file/LocalFileReader.java
 
b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/reader/file/LocalFileReader.java
index d41ac49d..bf682544 100644
--- 
a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/reader/file/LocalFileReader.java
+++ 
b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/reader/file/LocalFileReader.java
@@ -18,7 +18,6 @@
 package org.apache.hugegraph.loader.reader.file;
 
 import java.io.File;
-import java.io.FileInputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.nio.file.Files;
diff --git 
a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/spark/HugeGraphSparkLoader.java
 
b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/spark/HugeGraphSparkLoader.java
index 5ee06072..549f9e1c 100644
--- 
a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/spark/HugeGraphSparkLoader.java
+++ 
b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/spark/HugeGraphSparkLoader.java
@@ -99,11 +99,11 @@ public class HugeGraphSparkLoader implements Serializable {
         this.executor = Executors.newCachedThreadPool();
     }
 
-    private void  registerKryoClasses (SparkConf conf) {
+    private void registerKryoClasses(SparkConf conf) {
         try {
             conf.set("spark.serializer", 
"org.apache.spark.serializer.KryoSerializer")
                 .set("spark.kryo.registrationRequired", "true")
-                .registerKryoClasses(new Class[] {
+                .registerKryoClasses(new Class[]{
                     ImmutableBytesWritable.class,
                     KeyValue.class,
                     org.apache.spark.sql.types.StructType.class,
@@ -115,10 +115,10 @@ public class HugeGraphSparkLoader implements Serializable 
{
                     org.apache.spark.sql.catalyst.InternalRow.class,
                     org.apache.spark.sql.catalyst.InternalRow[].class,
                     Class.forName("org.apache.spark.internal.io." +
-                            "FileCommitProtocol$TaskCommitMessage"),
+                                      "FileCommitProtocol$TaskCommitMessage"),
                     Class.forName("scala.collection.immutable.Set$EmptySet$"),
                     Class.forName("org.apache.spark.sql.types.DoubleType$")
-                    });
+                });
         } catch (ClassNotFoundException e) {
             LOG.error("spark kryo serialized registration failed");
             throw new LoadException("spark kryo serialized registration 
failed", e);
@@ -339,15 +339,15 @@ public class HugeGraphSparkLoader implements Serializable 
{
                 BatchVertexRequest.Builder req =
                         new BatchVertexRequest.Builder();
                 req.vertices((List<Vertex>) (Object) graphElements)
-                   .updatingStrategies(updateStrategyMap)
-                   .createIfNotExist(true);
+                    .updatingStrategies(updateStrategyMap)
+                    .createIfNotExist(true);
                 g.updateVertices(req.build());
             } else {
                 BatchEdgeRequest.Builder req = new BatchEdgeRequest.Builder();
                 req.edges((List<Edge>) (Object) graphElements)
-                   .updatingStrategies(updateStrategyMap)
-                   .checkVertex(isCheckVertex)
-                   .createIfNotExist(true);
+                    .updatingStrategies(updateStrategyMap)
+                    .checkVertex(isCheckVertex)
+                    .createIfNotExist(true);
                 g.updateEdges(req.build());
             }
         }
diff --git 
a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/task/InsertTask.java
 
b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/task/InsertTask.java
index b773f3fe..87d93419 100644
--- 
a/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/task/InsertTask.java
+++ 
b/hugegraph-loader/src/main/java/org/apache/hugegraph/loader/task/InsertTask.java
@@ -45,11 +45,11 @@ public abstract class InsertTask implements Runnable {
     );
 
     public static final String[] UNACCEPTABLE_MESSAGES = {
-            // org.apache.http.conn.HttpHostConnectException
-            "Connection refused",
-            "The server is being shutting down",
-            "not allowed to insert, because already exist a vertex " +
-            "with same id and different label"
+        // org.apache.http.conn.HttpHostConnectException
+        "Connection refused",
+        "The server is being shutting down",
+        "not allowed to insert, because already exist a vertex " +
+        "with same id and different label"
     };
 
     protected final LoadContext context;
@@ -127,16 +127,16 @@ public abstract class InsertTask implements Runnable {
         if (this.type().isVertex()) {
             BatchVertexRequest.Builder req = new BatchVertexRequest.Builder();
             req.vertices((List<Vertex>) (Object) elements)
-               .updatingStrategies(this.mapping.updateStrategies())
-               .createIfNotExist(true);
+                .updatingStrategies(this.mapping.updateStrategies())
+                .createIfNotExist(true);
 
             client.graph().updateVertices(req.build());
         } else {
             BatchEdgeRequest.Builder req = new BatchEdgeRequest.Builder();
             req.edges((List<Edge>) (Object) elements)
-               .updatingStrategies(this.mapping.updateStrategies())
-               .checkVertex(checkVertex)
-               .createIfNotExist(true);
+                .updatingStrategies(this.mapping.updateStrategies())
+                .checkVertex(checkVertex)
+                .createIfNotExist(true);
 
             client.graph().updateEdges(req.build());
         }
diff --git 
a/hugegraph-tools/src/main/java/org/apache/hugegraph/base/Printer.java 
b/hugegraph-tools/src/main/java/org/apache/hugegraph/base/Printer.java
index 86aa3d09..9af58ba2 100644
--- a/hugegraph-tools/src/main/java/org/apache/hugegraph/base/Printer.java
+++ b/hugegraph-tools/src/main/java/org/apache/hugegraph/base/Printer.java
@@ -27,7 +27,7 @@ public class Printer {
         System.out.println(content);
     }
 
-    public static void print(String content, Object ... objects) {
+    public static void print(String content, Object... objects) {
         System.out.println(String.format(content, objects));
     }
 
diff --git 
a/hugegraph-tools/src/main/java/org/apache/hugegraph/base/ToolClient.java 
b/hugegraph-tools/src/main/java/org/apache/hugegraph/base/ToolClient.java
index 86cea21f..5d03c2a6 100644
--- a/hugegraph-tools/src/main/java/org/apache/hugegraph/base/ToolClient.java
+++ b/hugegraph-tools/src/main/java/org/apache/hugegraph/base/ToolClient.java
@@ -45,7 +45,8 @@ public class ToolClient {
             info.username = "";
             info.password = "";
         }
-        String trustStoreFile, trustStorePassword;
+        String trustStoreFile;
+        String trustStorePassword;
         if (info.url.startsWith("https")) {
             if (info.trustStoreFile == null || info.trustStoreFile.isEmpty()) {
                 trustStoreFile = Paths.get(homePath(), 
DEFAULT_TRUST_STORE_FILE)
diff --git 
a/hugegraph-tools/src/main/java/org/apache/hugegraph/cmd/HugeGraphCommand.java 
b/hugegraph-tools/src/main/java/org/apache/hugegraph/cmd/HugeGraphCommand.java
index da5c46ea..8a3501c7 100644
--- 
a/hugegraph-tools/src/main/java/org/apache/hugegraph/cmd/HugeGraphCommand.java
+++ 
b/hugegraph-tools/src/main/java/org/apache/hugegraph/cmd/HugeGraphCommand.java
@@ -163,7 +163,7 @@ public class HugeGraphCommand {
         this.throwMode.throwMode = throwMode;
     }
 
-    public JCommander jCommander() {
+    public JCommander jcommander() {
         JCommander.Builder builder = JCommander.newBuilder();
 
         // Add main command firstly
@@ -474,7 +474,7 @@ public class HugeGraphCommand {
     }
 
     public JCommander parseCommand(String[] args) {
-        JCommander jCommander = this.jCommander();
+        JCommander jCommander = this.jcommander();
         if (args.length == 0) {
             throw ExitException.exception(ToolUtil.commandUsage(jCommander),
                                           "No command found, please input" +
diff --git 
a/hugegraph-tools/src/main/java/org/apache/hugegraph/manager/AuthBackupRestoreManager.java
 
b/hugegraph-tools/src/main/java/org/apache/hugegraph/manager/AuthBackupRestoreManager.java
index 51830bf0..f8a49490 100644
--- 
a/hugegraph-tools/src/main/java/org/apache/hugegraph/manager/AuthBackupRestoreManager.java
+++ 
b/hugegraph-tools/src/main/java/org/apache/hugegraph/manager/AuthBackupRestoreManager.java
@@ -107,7 +107,7 @@ public class AuthBackupRestoreManager extends 
BackupRestoreBaseManager {
         E.checkState(CollectionUtils.isNotEmpty(authManagers),
                      "Backup data is empty, please check the type");
         for (AuthManager authManager : authManagers) {
-             authManager.backup();
+            authManager.backup();
         }
     }
 
@@ -127,13 +127,13 @@ public class AuthBackupRestoreManager extends 
BackupRestoreBaseManager {
                      "Restore data is empty, please check the type");
         List<String> allConflicts = Lists.newArrayList();
         for (AuthManager authManager : authManagers) {
-             allConflicts.addAll(authManager.checkConflict());
+            allConflicts.addAll(authManager.checkConflict());
         }
         E.checkState(CollectionUtils.isEmpty(allConflicts),
                      "Restore conflict with STOP strategy, conflicting " +
                      "data is s%", JsonUtil.toJson(allConflicts));
         for (AuthManager authManager : authManagers) {
-             authManager.restore();
+            authManager.restore();
         }
     }
 
@@ -158,7 +158,7 @@ public class AuthBackupRestoreManager extends 
BackupRestoreBaseManager {
                     break;
                 default:
                     throw new AssertionError(String.format(
-                              "Bad auth restore type: %s", type));
+                            "Bad auth restore type: %s", type));
             }
         }
         return authManagers;
@@ -177,10 +177,10 @@ public class AuthBackupRestoreManager extends 
BackupRestoreBaseManager {
         InputStream is = this.inputStream(type.string());
         try (InputStreamReader isr = new InputStreamReader(is, API.CHARSET);
              BufferedReader reader = new BufferedReader(isr)) {
-             String line;
-             while ((line = reader.readLine()) != null) {
-                 resultList.add(line);
-             }
+            String line;
+            while ((line = reader.readLine()) != null) {
+                resultList.add(line);
+            }
         } catch (IOException e) {
             throw new ToolsException("Failed to deserialize %s from %s",
                                      e, type.string(), resultList);
@@ -260,7 +260,7 @@ public class AuthBackupRestoreManager extends 
BackupRestoreBaseManager {
                                      "querying users of authority");
             Map<String, User> userMap = Maps.newHashMap();
             for (User user : users) {
-                 userMap.put(user.name(), user);
+                userMap.put(user.name(), user);
             }
             List<String> userJsons = readRestoreData(HugeType.USER);
             List<String> conflicts = Lists.newArrayList();
@@ -339,7 +339,7 @@ public class AuthBackupRestoreManager extends 
BackupRestoreBaseManager {
                                        "querying groups of authority");
             Map<String, Group> groupMap = Maps.newHashMap();
             for (Group group : groups) {
-                 groupMap.put(group.name(), group);
+                groupMap.put(group.name(), group);
             }
             List<String> groupJsons = readRestoreData(HugeType.GROUP);
             List<String> conflicts = Lists.newArrayList();
@@ -409,7 +409,7 @@ public class AuthBackupRestoreManager extends 
BackupRestoreBaseManager {
                                          "querying targets of authority");
             Map<String, Target> targetMap = Maps.newHashMap();
             for (Target target : targets) {
-                 targetMap.put(target.name(), target);
+                targetMap.put(target.name(), target);
             }
             List<String> targetJsons = readRestoreData(HugeType.TARGET);
             List<String> conflicts = Lists.newArrayList();
@@ -483,10 +483,10 @@ public class AuthBackupRestoreManager extends 
BackupRestoreBaseManager {
         public List<String> checkConflict() {
             List<Belong> belongs = retry(client.authManager()::listBelongs,
                                          "querying belongs of authority");
-            Map<String, Belong>  belongMap = Maps.newHashMap();
+            Map<String, Belong> belongMap = Maps.newHashMap();
             for (Belong belong : belongs) {
-                 String belongKey = belong.user() + ":" + belong.group();
-                 belongMap.put(belongKey, belong);
+                String belongKey = belong.user() + ":" + belong.group();
+                belongMap.put(belongKey, belong);
             }
             List<String> belongJsons = readRestoreData(HugeType.BELONG);
             List<String> conflicts = Lists.newArrayList();
@@ -520,7 +520,7 @@ public class AuthBackupRestoreManager extends 
BackupRestoreBaseManager {
                 
restoreBelong.user(idsMap.get(restoreBelong.user().toString()));
                 
restoreBelong.group(idsMap.get(restoreBelong.group().toString()));
                 retry(() -> {
-                     return client.authManager().createBelong(restoreBelong);
+                    return client.authManager().createBelong(restoreBelong);
                 }, "restore belongs of authority");
                 count++;
             }
@@ -545,10 +545,10 @@ public class AuthBackupRestoreManager extends 
BackupRestoreBaseManager {
         public List<String> checkConflict() {
             List<Access> accesses = retry(client.authManager()::listAccesses,
                                           "querying accesses of authority");
-            Map<String, Access>  accessMap = Maps.newHashMap();
+            Map<String, Access> accessMap = Maps.newHashMap();
             for (Access access : accesses) {
-                 String accessKey = access.group() + ":" + access.target();
-                 accessMap.put(accessKey, access);
+                String accessKey = access.group() + ":" + access.target();
+                accessMap.put(accessKey, access);
             }
             List<String> accessJsons = readRestoreData(HugeType.ACCESS);
             List<String> conflicts = Lists.newArrayList();
@@ -582,7 +582,7 @@ public class AuthBackupRestoreManager extends 
BackupRestoreBaseManager {
                 
restoreAccess.target(idsMap.get(restoreAccess.target().toString()));
                 
restoreAccess.group(idsMap.get(restoreAccess.group().toString()));
                 retry(() -> {
-                     return client.authManager().createAccess(restoreAccess);
+                    return client.authManager().createAccess(restoreAccess);
                 }, "restore accesses of authority");
                 count++;
             }
diff --git a/pom.xml b/pom.xml
index 19273312..b11b9e7f 100644
--- a/pom.xml
+++ b/pom.xml
@@ -439,12 +439,13 @@
                     </dependency>
                 </dependencies>
                 <configuration>
-                    <configLocation>checkstyle.xml</configLocation>
+                    <configLocation>tools/checkstyle.xml</configLocation>
                     <encoding>UTF-8</encoding>
                     <consoleOutput>true</consoleOutput>
                     <failsOnError>true</failsOnError>
                     <linkXRef>false</linkXRef>
                     
<includeTestSourceDirectory>false</includeTestSourceDirectory>
+                    
<suppressionsLocation>tools/suppressions.xml</suppressionsLocation>
                 </configuration>
                 <executions>
                     <execution>
diff --git a/checkstyle.xml b/tools/checkstyle.xml
similarity index 99%
rename from checkstyle.xml
rename to tools/checkstyle.xml
index 79f0983f..9b21713d 100644
--- a/checkstyle.xml
+++ b/tools/checkstyle.xml
@@ -20,7 +20,6 @@
     <property name="charset" value="UTF-8"/>
     <property name="severity" value="info"/>
     <property name="fileExtensions" value="java, properties, xml"/>
-
     <module name="FileTabCharacter">
         <property name="eachLine" value="true"/>
     </module>
@@ -123,7 +122,7 @@
             <property name="caseIndent" value="4"/>
             <property name="throwsIndent" value="2"/>
             <property name="lineWrappingIndentation" value="4"/>
-            <property name="arrayInitIndent" value="4"/>
+            <property name="arrayInitIndent" value="0"/>
         </module>
         <module name="UpperEll"/>
         <module name="IllegalImport">
diff --git a/tools/suppressions.xml b/tools/suppressions.xml
new file mode 100644
index 00000000..098b474f
--- /dev/null
+++ b/tools/suppressions.xml
@@ -0,0 +1,26 @@
+<?xml version="1.0"?>
+<!--
+  Licensed to the Apache Software Foundation (ASF) under one or more
+  contributor license agreements.  See the NOTICE file distributed with
+  this work for additional information regarding copyright ownership.
+  The ASF licenses this file to You under the Apache License, Version 2.0
+  (the "License"); you may not use this file except in compliance with
+  the License.  You may obtain a copy of the License at
+      http://www.apache.org/licenses/LICENSE-2.0
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+  -->
+<!DOCTYPE suppressions PUBLIC
+        "-//Checkstyle//DTD SuppressionFilter Configuration 1.2//EN"
+        "https://checkstyle.org/dtds/suppressions_1_2.dtd">
+<suppressions>
+    <suppress files="target[\\/].*" checks=".*"/>
+    <suppress files="messages.properties" checks="LineLength"/>
+    <suppress files="(Batch|Single)Example.java" checks="RegexpSingleline"/>
+    <suppress files="(Printer|ToolUtil).java" checks="RegexpSingleline"/>
+    <suppress files="Constant.java" checks="AvoidEscapedUnicodeCharacters"/>
+    <suppress files="BytesDemo.java" checks="Indentation"/>
+</suppressions>


Reply via email to