This is an automated email from the ASF dual-hosted git repository.

qiaojialin pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-iotdb.git


The following commit(s) were added to refs/heads/master by this push:
     new 7585123  fix some doc and SketchTool mistakes (#1185)
7585123 is described below

commit 75851231b24100f38b48e00da13fa2917c434069
Author: Haonan <[email protected]>
AuthorDate: Fri May 15 08:41:41 2020 +0800

    fix some doc and SketchTool mistakes (#1185)
    
    * fix some doc mistakes
    * fix a bug in sketchTool
---
 NOTICE                                             |  10 -
 NOTICE-binary                                      |  10 -
 docs/SystemDesign/1-TsFile/2-Format.md             | 411 ++++++++++-----------
 docs/UserGuide/1-Overview/2-Architecture.md        |   6 +-
 docs/UserGuide/1-Overview/3-Scenario.md            |   4 +-
 docs/UserGuide/4-Client/3-Programming - JDBC.md    |  72 ----
 docs/UserGuide/4-Client/7-Status Codes.md          | 117 +++---
 .../7-Ecosystem Integration/2-MapReduce TsFile.md  |   2 +-
 .../8-Architecture/3-Shared Nothing Cluster.md     |   2 +-
 docs/zh/SystemDesign/1-TsFile/2-Format.md          | 279 ++++++++++----
 docs/zh/UserGuide/1-Overview/2-Architecture.md     |   6 +-
 docs/zh/UserGuide/1-Overview/3-Scenario.md         |  10 +-
 docs/zh/UserGuide/4-Client/3-Programming - JDBC.md |  74 ----
 docs/zh/UserGuide/4-Client/7-Status Codes.md       | 118 +++---
 .../apache/iotdb/db/tools/TsFileSketchTool.java    |  51 ++-
 15 files changed, 602 insertions(+), 570 deletions(-)

diff --git a/NOTICE b/NOTICE
index 7c11064..6dca58b 100644
--- a/NOTICE
+++ b/NOTICE
@@ -17,16 +17,6 @@ grant the users the right to the use of patent under the 
requirement of Apache 2
 
 ============================================================================
 
-This product contains a modified portion of 'Apache Hive'
-
-Apache Hive
-Copyright 2008-2018 The Apache Software Foundation
-
-This product includes software developed by The Apache Software
-Foundation (http://www.apache.org/).
-
-============================================================================
-
 Apache Commons Collections
 Copyright 2001-2019 The Apache Software Foundation
 
diff --git a/NOTICE-binary b/NOTICE-binary
index 7c11064..6dca58b 100644
--- a/NOTICE-binary
+++ b/NOTICE-binary
@@ -17,16 +17,6 @@ grant the users the right to the use of patent under the 
requirement of Apache 2
 
 ============================================================================
 
-This product contains a modified portion of 'Apache Hive'
-
-Apache Hive
-Copyright 2008-2018 The Apache Software Foundation
-
-This product includes software developed by The Apache Software
-Foundation (http://www.apache.org/).
-
-============================================================================
-
 Apache Commons Collections
 Copyright 2001-2019 The Apache Software Foundation
 
diff --git a/docs/SystemDesign/1-TsFile/2-Format.md 
b/docs/SystemDesign/1-TsFile/2-Format.md
index a2aac9c..205ef16 100644
--- a/docs/SystemDesign/1-TsFile/2-Format.md
+++ b/docs/SystemDesign/1-TsFile/2-Format.md
@@ -403,217 +403,212 @@ Sketch save path:TsFile_sketch_view.txt
 file path: test.tsfile
 file length: 33436
 
-            POSITION|  CONTENT
-            --------   -------
-                   0|  [magic head] TsFile
-                   6|  [version number] 000002
-|||||||||||||||||||||  [Chunk Group] of root.group_12.d2, num of Chunks:3
-                  12|  [Chunk] of s_INT64e_RLE, numOfPoints:10000, time 
range:[1,10000], tsDataType:INT64, 
-                       startTime: 1 endTime: 10000 count: 10000 
[minValue:1,maxValue:1,firstValue:1,lastValue:1,sumValue:10000.0]
-                    |          [marker] 1
-                    |          [ChunkHeader]
-                    |          2 pages
-                 677|  [Chunk] of s_INT64e_TS_2DIFF, numOfPoints:10000, time 
range:[1,10000], tsDataType:INT64, 
-                       startTime: 1 endTime: 10000 count: 10000 
[minValue:1,maxValue:1,firstValue:1,lastValue:1,sumValue:10000.0]
-                    |          [marker] 1
-                    |          [ChunkHeader]
-                    |          1 pages
-                1349|  [Chunk] of s_INT64e_PLAIN, numOfPoints:10000, time 
range:[1,10000], tsDataType:INT64, 
-                       startTime: 1 endTime: 10000 count: 10000 
[minValue:1,maxValue:1,firstValue:1,lastValue:1,sumValue:10000.0]
-                    |          [marker] 1
-                    |          [ChunkHeader]
-                    |          2 pages
-                5766|  [Chunk Group Footer]
-                    |          [marker] 0
-                    |          [deviceID] root.group_12.d2
-                    |          [dataSize] 5754
-                    |          [num of chunks] 3
-|||||||||||||||||||||  [Chunk Group] of root.group_12.d2 ends
-                5799|  [Version Info pair]
-                    |          [marker] 3
-                    |          [offset] 5808
-                    |          [version] 102
-|||||||||||||||||||||  [Chunk Group] of root.group_12.d1, num of Chunks:3
-                5808|  [Chunk] of s_INT32e_PLAIN, numOfPoints:10000, time 
range:[1,10000], tsDataType:INT32, 
-                       startTime: 1 endTime: 10000 count: 10000 
[minValue:1,maxValue:1,firstValue:1,lastValue:1,sumValue:10000.0]
-                    |          [marker] 1
-                    |          [ChunkHeader]
-                    |          1 pages
-                8231|  [Chunk] of s_INT32e_TS_2DIFF, numOfPoints:10000, time 
range:[1,10000], tsDataType:INT32, 
-                       startTime: 1 endTime: 10000 count: 10000 
[minValue:1,maxValue:1,firstValue:1,lastValue:1,sumValue:10000.0]
-                    |          [marker] 1
-                    |          [ChunkHeader]
-                    |          1 pages
-                8852|  [Chunk] of s_INT32e_RLE, numOfPoints:10000, time 
range:[1,10000], tsDataType:INT32, 
-                       startTime: 1 endTime: 10000 count: 10000 
[minValue:1,maxValue:1,firstValue:1,lastValue:1,sumValue:10000.0]
-                    |          [marker] 1
-                    |          [ChunkHeader]
-                    |          1 pages
-                9399|  [Chunk Group Footer]
-                    |          [marker] 0
-                    |          [deviceID] root.group_12.d1
-                    |          [dataSize] 3591
-                    |          [num of chunks] 3
-|||||||||||||||||||||  [Chunk Group] of root.group_12.d1 ends
-                9432|  [Version Info pair]
-                    |          [marker] 3
-                    |          [offset] 9441
-                    |          [version] 102
-|||||||||||||||||||||  [Chunk Group] of root.group_12.d0, num of Chunks:2
-                9441|  [Chunk] of s_BOOLEANe_RLE, numOfPoints:10000, time 
range:[1,10000], tsDataType:BOOLEAN, 
-                       startTime: 1 endTime: 10000 count: 10000 
[firstValue:true,lastValue:true]
-                    |          [marker] 1
-                    |          [ChunkHeader]
-                    |          1 pages
-                9968|  [Chunk] of s_BOOLEANe_PLAIN, numOfPoints:10000, time 
range:[1,10000], tsDataType:BOOLEAN, 
-                       startTime: 1 endTime: 10000 count: 10000 
[firstValue:true,lastValue:true]
-                    |          [marker] 1
-                    |          [ChunkHeader]
-                    |          1 pages
-               10961|  [Chunk Group Footer]
-                    |          [marker] 0
-                    |          [deviceID] root.group_12.d0
-                    |          [dataSize] 1520
-                    |          [num of chunks] 2
-|||||||||||||||||||||  [Chunk Group] of root.group_12.d0 ends
-               10994|  [Version Info pair]
-                    |          [marker] 3
-                    |          [offset] 11003
-                    |          [version] 102
-|||||||||||||||||||||  [Chunk Group] of root.group_12.d5, num of Chunks:1
-               11003|  [Chunk] of s_TEXTe_PLAIN, numOfPoints:10000, time 
range:[1,10000], tsDataType:TEXT, 
-                       startTime: 1 endTime: 10000 count: 10000 
[firstValue:version_test,lastValue:version_test]
-                    |          [marker] 1
-                    |          [ChunkHeader]
-                    |          3 pages
-               19278|  [Chunk Group Footer]
-                    |          [marker] 0
-                    |          [deviceID] root.group_12.d5
-                    |          [dataSize] 8275
-                    |          [num of chunks] 1
-|||||||||||||||||||||  [Chunk Group] of root.group_12.d5 ends
-               19311|  [Version Info pair]
-                    |          [marker] 3
-                    |          [offset] 19320
-                    |          [version] 102
-|||||||||||||||||||||  [Chunk Group] of root.group_12.d4, num of Chunks:4
-               19320|  [Chunk] of s_DOUBLEe_PLAIN, numOfPoints:10000, time 
range:[1,10000], tsDataType:DOUBLE, 
-                       startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.00000000123]
-                    |          [marker] 1
-                    |          [ChunkHeader]
-                    |          2 pages
-               23740|  [Chunk] of s_DOUBLEe_TS_2DIFF, numOfPoints:10000, time 
range:[1,10000], tsDataType:DOUBLE, 
-                       startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.000000002045]
-                    |          [marker] 1
-                    |          [ChunkHeader]
-                    |          1 pages
-               24414|  [Chunk] of s_DOUBLEe_GORILLA, numOfPoints:10000, time 
range:[1,10000], tsDataType:DOUBLE, 
-                       startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.000000002045]
-                    |          [marker] 1
-                    |          [ChunkHeader]
-                    |          1 pages
-               25054|  [Chunk] of s_DOUBLEe_RLE, numOfPoints:10000, time 
range:[1,10000], tsDataType:DOUBLE, 
-                       startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.000000001224]
-                    |          [marker] 1
-                    |          [ChunkHeader]
-                    |          2 pages
-               25717|  [Chunk Group Footer]
-                    |          [marker] 0
-                    |          [deviceID] root.group_12.d4
-                    |          [dataSize] 6397
-                    |          [num of chunks] 4
-|||||||||||||||||||||  [Chunk Group] of root.group_12.d4 ends
-               25750|  [Version Info pair]
-                    |          [marker] 3
-                    |          [offset] 25759
-                    |          [version] 102
-|||||||||||||||||||||  [Chunk Group] of root.group_12.d3, num of Chunks:4
-               25759|  [Chunk] of s_FLOATe_GORILLA, numOfPoints:10000, time 
range:[1,10000], tsDataType:FLOAT, 
-                       startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.00023841858]
-                    |          [marker] 1
-                    |          [ChunkHeader]
-                    |          1 pages
-               26375|  [Chunk] of s_FLOATe_PLAIN, numOfPoints:10000, time 
range:[1,10000], tsDataType:FLOAT, 
-                       startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.00023841858]
-                    |          [marker] 1
-                    |          [ChunkHeader]
-                    |          1 pages
-               28796|  [Chunk] of s_FLOATe_RLE, numOfPoints:10000, time 
range:[1,10000], tsDataType:FLOAT, 
-                       startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.00023841858]
-                    |          [marker] 1
-                    |          [ChunkHeader]
-                    |          1 pages
-               29343|  [Chunk] of s_FLOATe_TS_2DIFF, numOfPoints:10000, time 
range:[1,10000], tsDataType:FLOAT, 
-                       startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.00023841858]
-                    |          [marker] 1
-                    |          [ChunkHeader]
-                    |          1 pages
-               29967|  [Chunk Group Footer]
-                    |          [marker] 0
-                    |          [deviceID] root.group_12.d3
-                    |          [dataSize] 4208
-                    |          [num of chunks] 4
-|||||||||||||||||||||  [Chunk Group] of root.group_12.d3 ends
-               30000|  [Version Info pair]
-                    |          [marker] 3
-                    |          [offset] 30009
-                    |          [version] 102
-               30009|  [marker] 2
-               30010|  [ChunkMetadataList] of 
root.group_12.d0.s_BOOLEANe_PLAIN, tsDataType:BOOLEAN
-                    |  [startTime: 1 endTime: 10000 count: 10000 
[firstValue:true,lastValue:true]] 
-               30066|  [ChunkMetadataList] of root.group_12.d0.s_BOOLEANe_RLE, 
tsDataType:BOOLEAN
-                    |  [startTime: 1 endTime: 10000 count: 10000 
[firstValue:true,lastValue:true]] 
-               30120|  [ChunkMetadataList] of root.group_12.d1.s_INT32e_PLAIN, 
tsDataType:INT32
-                    |  [startTime: 1 endTime: 10000 count: 10000 
[minValue:1,maxValue:1,firstValue:1,lastValue:1,sumValue:10000.0]] 
-               30196|  [ChunkMetadataList] of root.group_12.d1.s_INT32e_RLE, 
tsDataType:INT32
-                    |  [startTime: 1 endTime: 10000 count: 10000 
[minValue:1,maxValue:1,firstValue:1,lastValue:1,sumValue:10000.0]] 
-               30270|  [ChunkMetadataList] of 
root.group_12.d1.s_INT32e_TS_2DIFF, tsDataType:INT32
-                    |  [startTime: 1 endTime: 10000 count: 10000 
[minValue:1,maxValue:1,firstValue:1,lastValue:1,sumValue:10000.0]] 
-               30349|  [ChunkMetadataList] of root.group_12.d2.s_INT64e_PLAIN, 
tsDataType:INT64
-                    |  [startTime: 1 endTime: 10000 count: 10000 
[minValue:1,maxValue:1,firstValue:1,lastValue:1,sumValue:10000.0]] 
-               30441|  [ChunkMetadataList] of root.group_12.d2.s_INT64e_RLE, 
tsDataType:INT64
-                    |  [startTime: 1 endTime: 10000 count: 10000 
[minValue:1,maxValue:1,firstValue:1,lastValue:1,sumValue:10000.0]] 
-               30531|  [ChunkMetadataList] of 
root.group_12.d2.s_INT64e_TS_2DIFF, tsDataType:INT64
-                    |  [startTime: 1 endTime: 10000 count: 10000 
[minValue:1,maxValue:1,firstValue:1,lastValue:1,sumValue:10000.0]] 
-               30626|  [ChunkMetadataList] of 
root.group_12.d3.s_FLOATe_GORILLA, tsDataType:FLOAT
-                    |  [startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.00023841858]]
 
-               30704|  [ChunkMetadataList] of root.group_12.d3.s_FLOATe_PLAIN, 
tsDataType:FLOAT
-                    |  [startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.00023841858]]
 
-               30780|  [ChunkMetadataList] of root.group_12.d3.s_FLOATe_RLE, 
tsDataType:FLOAT
-                    |  [startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.00023841858]]
 
-               30854|  [ChunkMetadataList] of 
root.group_12.d3.s_FLOATe_TS_2DIFF, tsDataType:FLOAT
-                    |  [startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.00023841858]]
 
-               30933|  [ChunkMetadataList] of 
root.group_12.d4.s_DOUBLEe_GORILLA, tsDataType:DOUBLE
-                    |  [startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.000000002045]]
 
-               31028|  [ChunkMetadataList] of 
root.group_12.d4.s_DOUBLEe_PLAIN, tsDataType:DOUBLE
-                    |  [startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.00000000123]]
 
-               31121|  [ChunkMetadataList] of root.group_12.d4.s_DOUBLEe_RLE, 
tsDataType:DOUBLE
-                    |  [startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.000000001224]]
 
-               31212|  [ChunkMetadataList] of 
root.group_12.d4.s_DOUBLEe_TS_2DIFF, tsDataType:DOUBLE
-                    |  [startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.000000002045]]
 
-               31308|  [ChunkMetadataList] of root.group_12.d5.s_TEXTe_PLAIN, 
tsDataType:TEXT
-                    |  [startTime: 1 endTime: 10000 count: 10000 
[firstValue:version_test,lastValue:version_test]] 
-               32840|  [MetadataIndex] of root.group_12.d0
-               32881|  [MetadataIndex] of root.group_12.d1
-               32920|  [MetadataIndex] of root.group_12.d2
-               32959|  [MetadataIndex] of root.group_12.d3
-               33000|  [MetadataIndex] of root.group_12.d4
-               33042|  [MetadataIndex] of root.group_12.d5
-               33080|  [TsFileMetadata]
-                    |          [num of devices] 6
-                    |          6 key&TsMetadataIndex
-                    |          [totalChunkNum] 17
-                    |          [invalidChunkNum] 0
-                    |          [bloom filter bit vector byte array length] 32
-                    |          [bloom filter bit vector byte array] 
-                    |          [bloom filter number of bits] 256
-                    |          [bloom filter number of hash functions] 5
-               33426|  [TsFileMetadataSize] 346
-               33430|  [magic tail] TsFile
-               33436|  END of TsFile
+            POSITION| CONTENT
+            --------  -------
+                   0| [magic head] TsFile
+                   6| [version number] 000002
+||||||||||||||||||||| [Chunk Group] of root.group_12.d2, num of Chunks:3
+                  12| [Chunk] of s_INT64e_RLE, numOfPoints:10000, time 
range:[1,10000], tsDataType:INT64, 
+                      startTime: 1 endTime: 10000 count: 10000 
[minValue:1,maxValue:1,firstValue:1,lastValue:1,sumValue:10000.0]
+                    |   [marker] 1
+                    |   [ChunkHeader]
+                    |   2 pages
+                 677| [Chunk] of s_INT64e_TS_2DIFF, numOfPoints:10000, time 
range:[1,10000], tsDataType:INT64, 
+                      startTime: 1 endTime: 10000 count: 10000 
[minValue:1,maxValue:1,firstValue:1,lastValue:1,sumValue:10000.0]
+                    |   [marker] 1
+                    |   [ChunkHeader]
+                    |   1 pages
+                1349| [Chunk] of s_INT64e_PLAIN, numOfPoints:10000, time 
range:[1,10000], tsDataType:INT64, 
+                      startTime: 1 endTime: 10000 count: 10000 
[minValue:1,maxValue:1,firstValue:1,lastValue:1,sumValue:10000.0]
+                    |   [marker] 1
+                    |   [ChunkHeader]
+                    |   2 pages
+                5766| [Chunk Group Footer]
+                    |   [marker] 0
+                    |   [deviceID] root.group_12.d2
+                    |   [dataSize] 5754
+                    |   [num of chunks] 3
+||||||||||||||||||||| [Chunk Group] of root.group_12.d2 ends
+                5799| [Version Info]
+                    |   [marker] 3
+                    |   [version] 102
+||||||||||||||||||||| [Chunk Group] of root.group_12.d1, num of Chunks:3
+                5808| [Chunk] of s_INT32e_PLAIN, numOfPoints:10000, time 
range:[1,10000], tsDataType:INT32, 
+                      startTime: 1 endTime: 10000 count: 10000 
[minValue:1,maxValue:1,firstValue:1,lastValue:1,sumValue:10000.0]
+                    |   [marker] 1
+                    |   [ChunkHeader]
+                    |   1 pages
+                8231| [Chunk] of s_INT32e_TS_2DIFF, numOfPoints:10000, time 
range:[1,10000], tsDataType:INT32, 
+                      startTime: 1 endTime: 10000 count: 10000 
[minValue:1,maxValue:1,firstValue:1,lastValue:1,sumValue:10000.0]
+                    |   [marker] 1
+                    |   [ChunkHeader]
+                    |   1 pages
+                8852| [Chunk] of s_INT32e_RLE, numOfPoints:10000, time 
range:[1,10000], tsDataType:INT32, 
+                      startTime: 1 endTime: 10000 count: 10000 
[minValue:1,maxValue:1,firstValue:1,lastValue:1,sumValue:10000.0]
+                    |   [marker] 1
+                    |   [ChunkHeader]
+                    |   1 pages
+                9399| [Chunk Group Footer]
+                    |   [marker] 0
+                    |   [deviceID] root.group_12.d1
+                    |   [dataSize] 3591
+                    |   [num of chunks] 3
+||||||||||||||||||||| [Chunk Group] of root.group_12.d1 ends
+                9432| [Version Info]
+                    |   [marker] 3
+                    |   [version] 102
+||||||||||||||||||||| [Chunk Group] of root.group_12.d0, num of Chunks:2
+                9441| [Chunk] of s_BOOLEANe_RLE, numOfPoints:10000, time 
range:[1,10000], tsDataType:BOOLEAN, 
+                      startTime: 1 endTime: 10000 count: 10000 
[firstValue:true,lastValue:true]
+                    |   [marker] 1
+                    |   [ChunkHeader]
+                    |   1 pages
+                9968| [Chunk] of s_BOOLEANe_PLAIN, numOfPoints:10000, time 
range:[1,10000], tsDataType:BOOLEAN, 
+                      startTime: 1 endTime: 10000 count: 10000 
[firstValue:true,lastValue:true]
+                    |   [marker] 1
+                    |   [ChunkHeader]
+                    |   1 pages
+               10961| [Chunk Group Footer]
+                    |   [marker] 0
+                    |   [deviceID] root.group_12.d0
+                    |   [dataSize] 1520
+                    |   [num of chunks] 2
+||||||||||||||||||||| [Chunk Group] of root.group_12.d0 ends
+               10994| [Version Info]
+                    |   [marker] 3
+                    |   [version] 102
+||||||||||||||||||||| [Chunk Group] of root.group_12.d5, num of Chunks:1
+               11003| [Chunk] of s_TEXTe_PLAIN, numOfPoints:10000, time 
range:[1,10000], tsDataType:TEXT, 
+                      startTime: 1 endTime: 10000 count: 10000 
[firstValue:version_test,lastValue:version_test]
+                    |   [marker] 1
+                    |   [ChunkHeader]
+                    |   3 pages
+               19278| [Chunk Group Footer]
+                    |   [marker] 0
+                    |   [deviceID] root.group_12.d5
+                    |   [dataSize] 8275
+                    |   [num of chunks] 1
+||||||||||||||||||||| [Chunk Group] of root.group_12.d5 ends
+               19311| [Version Info]
+                    |   [marker] 3
+                    |   [version] 102
+||||||||||||||||||||| [Chunk Group] of root.group_12.d4, num of Chunks:4
+               19320| [Chunk] of s_DOUBLEe_PLAIN, numOfPoints:10000, time 
range:[1,10000], tsDataType:DOUBLE, 
+                      startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.00000000123]
+                    |   [marker] 1
+                    |   [ChunkHeader]
+                    |   2 pages
+               23740| [Chunk] of s_DOUBLEe_TS_2DIFF, numOfPoints:10000, time 
range:[1,10000], tsDataType:DOUBLE, 
+                      startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.000000002045]
+                    |   [marker] 1
+                    |   [ChunkHeader]
+                    |   1 pages
+               24414| [Chunk] of s_DOUBLEe_GORILLA, numOfPoints:10000, time 
range:[1,10000], tsDataType:DOUBLE, 
+                      startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.000000002045]
+                    |   [marker] 1
+                    |   [ChunkHeader]
+                    |   1 pages
+               25054| [Chunk] of s_DOUBLEe_RLE, numOfPoints:10000, time 
range:[1,10000], tsDataType:DOUBLE, 
+                      startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.000000001224]
+                    |   [marker] 1
+                    |   [ChunkHeader]
+                    |   2 pages
+               25717| [Chunk Group Footer]
+                    |   [marker] 0
+                    |   [deviceID] root.group_12.d4
+                    |   [dataSize] 6397
+                    |   [num of chunks] 4
+||||||||||||||||||||| [Chunk Group] of root.group_12.d4 ends
+               25750| [Version Info]
+                    |   [marker] 3
+                    |   [version] 102
+||||||||||||||||||||| [Chunk Group] of root.group_12.d3, num of Chunks:4
+               25759| [Chunk] of s_FLOATe_GORILLA, numOfPoints:10000, time 
range:[1,10000], tsDataType:FLOAT, 
+                      startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.00023841858]
+                    |   [marker] 1
+                    |   [ChunkHeader]
+                    |   1 pages
+               26375| [Chunk] of s_FLOATe_PLAIN, numOfPoints:10000, time 
range:[1,10000], tsDataType:FLOAT, 
+                      startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.00023841858]
+                    |   [marker] 1
+                    |   [ChunkHeader]
+                    |   1 pages
+               28796| [Chunk] of s_FLOATe_RLE, numOfPoints:10000, time 
range:[1,10000], tsDataType:FLOAT, 
+                      startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.00023841858]
+                    |   [marker] 1
+                    |   [ChunkHeader]
+                    |   1 pages
+               29343| [Chunk] of s_FLOATe_TS_2DIFF, numOfPoints:10000, time 
range:[1,10000], tsDataType:FLOAT, 
+                      startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.00023841858]
+                    |   [marker] 1
+                    |   [ChunkHeader]
+                    |   1 pages
+               29967| [Chunk Group Footer]
+                    |   [marker] 0
+                    |   [deviceID] root.group_12.d3
+                    |   [dataSize] 4208
+                    |   [num of chunks] 4
+||||||||||||||||||||| [Chunk Group] of root.group_12.d3 ends
+               30000| [Version Info]
+                    |   [marker] 3
+                    |   [version] 102
+               30009| [marker] 2
+               30010| [ChunkMetadataList] of 
root.group_12.d0.s_BOOLEANe_PLAIN, tsDataType:BOOLEAN
+                    | [startTime: 1 endTime: 10000 count: 10000 
[firstValue:true,lastValue:true]] 
+               30066| [ChunkMetadataList] of root.group_12.d0.s_BOOLEANe_RLE, 
tsDataType:BOOLEAN
+                    | [startTime: 1 endTime: 10000 count: 10000 
[firstValue:true,lastValue:true]] 
+               30120| [ChunkMetadataList] of root.group_12.d1.s_INT32e_PLAIN, 
tsDataType:INT32
+                    | [startTime: 1 endTime: 10000 count: 10000 
[minValue:1,maxValue:1,firstValue:1,lastValue:1,sumValue:10000.0]] 
+               30196| [ChunkMetadataList] of root.group_12.d1.s_INT32e_RLE, 
tsDataType:INT32
+                    | [startTime: 1 endTime: 10000 count: 10000 
[minValue:1,maxValue:1,firstValue:1,lastValue:1,sumValue:10000.0]] 
+               30270| [ChunkMetadataList] of 
root.group_12.d1.s_INT32e_TS_2DIFF, tsDataType:INT32
+                    | [startTime: 1 endTime: 10000 count: 10000 
[minValue:1,maxValue:1,firstValue:1,lastValue:1,sumValue:10000.0]] 
+               30349| [ChunkMetadataList] of root.group_12.d2.s_INT64e_PLAIN, 
tsDataType:INT64
+                    | [startTime: 1 endTime: 10000 count: 10000 
[minValue:1,maxValue:1,firstValue:1,lastValue:1,sumValue:10000.0]] 
+               30441| [ChunkMetadataList] of root.group_12.d2.s_INT64e_RLE, 
tsDataType:INT64
+                    | [startTime: 1 endTime: 10000 count: 10000 
[minValue:1,maxValue:1,firstValue:1,lastValue:1,sumValue:10000.0]] 
+               30531| [ChunkMetadataList] of 
root.group_12.d2.s_INT64e_TS_2DIFF, tsDataType:INT64
+                    | [startTime: 1 endTime: 10000 count: 10000 
[minValue:1,maxValue:1,firstValue:1,lastValue:1,sumValue:10000.0]] 
+               30626| [ChunkMetadataList] of 
root.group_12.d3.s_FLOATe_GORILLA, tsDataType:FLOAT
+                    | [startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.00023841858]]
 
+               30704| [ChunkMetadataList] of root.group_12.d3.s_FLOATe_PLAIN, 
tsDataType:FLOAT
+                    | [startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.00023841858]]
 
+               30780| [ChunkMetadataList] of root.group_12.d3.s_FLOATe_RLE, 
tsDataType:FLOAT
+                    | [startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.00023841858]]
 
+               30854| [ChunkMetadataList] of 
root.group_12.d3.s_FLOATe_TS_2DIFF, tsDataType:FLOAT
+                    | [startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.00023841858]]
 
+               30933| [ChunkMetadataList] of 
root.group_12.d4.s_DOUBLEe_GORILLA, tsDataType:DOUBLE
+                    | [startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.000000002045]]
 
+               31028| [ChunkMetadataList] of root.group_12.d4.s_DOUBLEe_PLAIN, 
tsDataType:DOUBLE
+                    | [startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.00000000123]]
 
+               31121| [ChunkMetadataList] of root.group_12.d4.s_DOUBLEe_RLE, 
tsDataType:DOUBLE
+                    | [startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.000000001224]]
 
+               31212| [ChunkMetadataList] of 
root.group_12.d4.s_DOUBLEe_TS_2DIFF, tsDataType:DOUBLE
+                    | [startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.000000002045]]
 
+               31308| [ChunkMetadataList] of root.group_12.d5.s_TEXTe_PLAIN, 
tsDataType:TEXT
+                    | [startTime: 1 endTime: 10000 count: 10000 
[firstValue:version_test,lastValue:version_test]] 
+               32840| [MetadataIndex] of root.group_12.d0
+               32881| [MetadataIndex] of root.group_12.d1
+               32920| [MetadataIndex] of root.group_12.d2
+               32959| [MetadataIndex] of root.group_12.d3
+               33000| [MetadataIndex] of root.group_12.d4
+               33042| [MetadataIndex] of root.group_12.d5
+               33080| [TsFileMetadata]
+                    |   [num of devices] 6
+                    |   6 key&TsMetadataIndex
+                    |   [totalChunkNum] 17
+                    |   [invalidChunkNum] 0
+                    |   [bloom filter bit vector byte array length] 32
+                    |   [bloom filter bit vector byte array] 
+                    |   [bloom filter number of bits] 256
+                    |   [bloom filter number of hash functions] 5
+               33426| [TsFileMetadataSize] 346
+               33430| [magic tail] TsFile
+               33436| END of TsFile
 
 ---------------------------------- TsFile Sketch End 
----------------------------------
 
+
 ````````````````````````
 
 #### 1.3.4 TsFileSequenceRead
diff --git a/docs/UserGuide/1-Overview/2-Architecture.md 
b/docs/UserGuide/1-Overview/2-Architecture.md
index fb9d870..906ef3d 100644
--- a/docs/UserGuide/1-Overview/2-Architecture.md
+++ b/docs/UserGuide/1-Overview/2-Architecture.md
@@ -29,6 +29,10 @@ IoTDB suite can provide a series of functions in the real 
situation such as data
 
 As shown in Figure 1.1, users can use JDBC to import timeseries data collected 
by sensor on the device to local/remote IoTDB. These timeseries data may be 
system state data (such as server load and CPU memory, etc.), message queue 
data, timeseries data from applications, or other timeseries data in the 
database. Users can also write the data directly to the TsFile (local or on 
HDFS).
 
-For the data written to IoTDB and local TsFile, users can use TsFileSync tool 
to synchronize the TsFile to the HDFS, thereby implementing data processing 
tasks such as abnormality detection and machine learning on the Hadoop or Spark 
data processing platform. The results of the analysis can be write back to 
TsFile in the same way.
+For the data written to IoTDB and local TsFile, users can use TsFileSync tool 
to synchronize the TsFile to the HDFS, thereby implementing data processing 
tasks such as abnormality detection and machine learning on the Hadoop or Spark 
data processing platform. 
+
+For the data written to HDFS or local TsFile, users can use 
TsFile-Hadoop-Connector or TsFile-Spark-Connector to allow Hadoop or Spark to 
process data. 
+
+The results of the analysis can be written back to TsFile in the same way.
 
 Also, IoTDB and TsFile provide client tools to meet the various needs of users 
in writing and viewing data in SQL form, script form and graphical form.
diff --git a/docs/UserGuide/1-Overview/3-Scenario.md 
b/docs/UserGuide/1-Overview/3-Scenario.md
index ed2b6f5..280de1d 100644
--- a/docs/UserGuide/1-Overview/3-Scenario.md
+++ b/docs/UserGuide/1-Overview/3-Scenario.md
@@ -35,7 +35,7 @@ At this point, the data can be stored using TsFile component, 
TsFileSync tool, a
 
 In this scenario, only TsFile and TsFileSync are required to be deployed on a 
PC, and a Hadoop/Spark cluster is required. The schematic diagram is shown in 
Figure 1.2. Figure 1.3 shows the architecture at this time.
 
-<img style="width:100%; max-width:800px; max-height:600px; margin-left:auto; 
margin-right:auto; display:block;" 
src="https://user-images.githubusercontent.com/13203019/51579026-77ad1580-1efa-11e9-8345-564b22d70286.jpg";>
+<img style="width:100%; max-width:800px; max-height:600px; margin-left:auto; 
margin-right:auto; display:block;" 
src="https://user-images.githubusercontent.com/25913899/81768490-bf034f00-950d-11ea-9b56-fef3edca0958.png";>
 
 ## Scenario 2
 
@@ -61,7 +61,7 @@ In order to ensure that the data of the robot can be 
monitored and analyzed in t
 
 At this point, IoTDB, IoTDB-Client tools, TsFileSync tools, and Hadoop/Spark 
integration components in the IoTDB suite can be used. IoTDB-Client tool is 
installed on the robot and each of them is connected to the LAN of the factory. 
When sensors generate real-time data, the data will be uploaded to the server 
in the factory. The IoTDB server and TsFileSync is installed on the server 
connected to the external network. Once triggered, the data on the server will 
be upload to the data cente [...]
 
-<img style="width:100%; max-width:800px; max-height:600px; margin-left:auto; 
margin-right:auto; display:block;" 
src="https://user-images.githubusercontent.com/13203019/51579085-9dd2b580-1efa-11e9-97b9-f56bc8d342b0.jpg";>
+<img style="width:100%; max-width:800px; max-height:600px; margin-left:auto; 
margin-right:auto; display:block;" 
src="https://user-images.githubusercontent.com/25913899/81768477-b874d780-950d-11ea-80ca-8807b9bd0970.png";>
 
 ## Scenario 4
 
diff --git a/docs/UserGuide/4-Client/3-Programming - JDBC.md 
b/docs/UserGuide/4-Client/3-Programming - JDBC.md
index 27903f3..c14021d 100644
--- a/docs/UserGuide/4-Client/3-Programming - JDBC.md   
+++ b/docs/UserGuide/4-Client/3-Programming - JDBC.md   
@@ -208,75 +208,3 @@ public class JDBCExample {
   }
 }
 ```
-## Status Code
-
-**Status Code** is introduced in the latest version. For example, as IoTDB 
requires registering the time series first before writing data, a kind of 
solution is:
-
-```
-try {
-    writeData();
-} catch (SQLException e) {
-  // the most case is that the time series does not exist
-  if (e.getMessage().contains("exist")) {
-      //However, using the content of the error message is not so efficient
-      registerTimeSeries();
-      //write data once again
-      writeData();
-  }
-}
-
-```
-
-With Status Code, instead of writing codes like `if 
(e.getErrorMessage().contains("exist"))`, we can simply use `e.getErrorCode() 
== TSStatusCode.TIME_SERIES_NOT_EXIST_ERROR.getStatusCode()`.
-
-Here is a list of Status Code and related message:
-
-|Status Code|Status Type|Meanings|
-|:---|:---|:---|
-|200|SUCCESS_STATUS||
-|201|STILL_EXECUTING_STATUS||
-|202|INVALID_HANDLE_STATUS||
-|203|INCOMPATIBLE_VERSION|Incompatible version|
-|298|NODE_DELETE_FAILED_ERROR|Failed while deleting node|
-|299|ALIAS_ALREADY_EXIST_ERROR|Alias already exists|
-|300|TIMESERIES_ALREADY_EXIST_ERROR|Timeseries already exists|
-|301|TIMESERIES_NOT_EXIST_ERROR|Timeseries does not exist|
-|302|UNSUPPORTED_FETCH_METADATA_OPERATION_ERROR|Unsupported fetch metadata 
operation|
-|303|METADATA_ERROR|Meet error when dealing with metadata|
-|305|OUT_OF_TTL_ERROR|Insertion time is less than TTL time bound|
-|306|CONFIG_ADJUSTER|IoTDB system load is too large|
-|307|MERGE_ERROR|Meet error while merging|
-|308|SYSTEM_CHECK_ERROR|Meet error while system checking|
-|309|SYNC_DEVICE_OWNER_CONFLICT_ERROR|Sync device owners conflict|
-|310|SYNC_CONNECTION_EXCEPTION|Meet error while sync connecting|
-|311|STORAGE_GROUP_PROCESSOR_ERROR|Storage group processor related error|
-|312|STORAGE_GROUP_ERROR|Storage group related error|
-|313|STORAGE_ENGINE_ERROR|Storage engine related error|
-|314|TSFILE_PROCESSOR_ERROR|TsFile processor related error|
-|315|PATH_ILLEGAL|Illegal path|
-|316|LOAD_FILE_ERROR|Meet error while loading file|
-|400|EXECUTE_STATEMENT_ERROR|Execute statement error|
-|401|SQL_PARSE_ERROR|Meet error while parsing SQL|
-|402|GENERATE_TIME_ZONE_ERROR|Meet error while generating time zone|
-|403|SET_TIME_ZONE_ERROR|Meet error while setting time zone|
-|404|NOT_STORAGE_GROUP_ERROR|Operating object is not a storage group|
-|405|QUERY_NOT_ALLOWED|Query statements are not allowed error|
-|406|AST_FORMAT_ERROR|AST format related error|
-|407|LOGICAL_OPERATOR_ERROR|Logical operator related error|
-|408|LOGICAL_OPTIMIZE_ERROR|Logical optimize related error|
-|409|UNSUPPORTED_FILL_TYPE_ERROR|Unsupported fill type related error|
-|410|PATH_ERROR|Path related error|
-|411|QUERY_PROCESS_ERROR|Query process related error|
-|412|WRITE_PROCESS_ERROR|Writing data related error|
-|500|INTERNAL_SERVER_ERROR|Internal server error|
-|501|CLOSE_OPERATION_ERROR|Meet error in close operation|
-|502|READ_ONLY_SYSTEM_ERROR|Operating system is read only|
-|503|DISK_SPACE_INSUFFICIENT_ERROR|Disk space is insufficient|
-|504|START_UP_ERROR|Meet error while starting up|
-|600|WRONG_LOGIN_PASSWORD_ERROR|Username or password is wrong|
-|601|NOT_LOGIN_ERROR|Has not logged in|
-|602|NO_PERMISSION_ERROR|No permissions for this operation|
-|603|UNINITIALIZED_AUTH_ERROR|Uninitialized authorizer|
-
-> All exceptions are refactored in latest version by extracting uniform 
message into exception classes. Different error codes are added to all 
exceptions. When an exception is caught and a higher-level exception is thrown, 
the error code will keep and pass so that users will know the detailed error 
reason.
-A base exception class "ProcessException" is also added to be extended by all 
exceptions.
diff --git a/docs/UserGuide/4-Client/7-Status Codes.md 
b/docs/UserGuide/4-Client/7-Status Codes.md
index 09a797b..d59c4a6 100644
--- a/docs/UserGuide/4-Client/7-Status Codes.md 
+++ b/docs/UserGuide/4-Client/7-Status Codes.md 
@@ -19,54 +19,75 @@
 
 -->
 
-# Status codes
+# Status Codes
 
-For each request, the client will receive a status code. 
-If a SQL is not successfully, the status code and some message will be 
returned. 
+**Status Code** is introduced in the latest version. For example, as IoTDB 
requires registering the time series first before writing data, a kind of 
solution is:
 
-Current status codes:
+```
+try {
+    writeData();
+} catch (SQLException e) {
+  // the most case is that the time series does not exist
+  if (e.getMessage().contains("exist")) {
+      //However, using the content of the error message is not so efficient
+      registerTimeSeries();
+      //write data once again
+      writeData();
+  }
+}
 
-  SUCCESS_STATUS(200),
-  STILL_EXECUTING_STATUS(201),
-  INVALID_HANDLE_STATUS(202),
-  INCOMPATIBLE_VERSION(203),
-  NODE_DELETE_FAILED_ERROR(298),
-  ALIAS_ALREADY_EXIST_ERROR(299),
-  TIMESERIES_ALREADY_EXIST_ERROR(300),
-  TIMESERIES_NOT_EXIST_ERROR(301),
-  UNSUPPORTED_FETCH_METADATA_OPERATION_ERROR(302),
-  METADATA_ERROR(303),
-  OUT_OF_TTL_ERROR(305),
-  CONFIG_ADJUSTER(306),
-  MERGE_ERROR(307),
-  SYSTEM_CHECK_ERROR(308),
-  SYNC_DEVICE_OWNER_CONFLICT_ERROR(309),
-  SYNC_CONNECTION_EXCEPTION(310),
-  STORAGE_GROUP_PROCESSOR_ERROR(311),
-  STORAGE_GROUP_ERROR(312),
-  STORAGE_ENGINE_ERROR(313),
-  TSFILE_PROCESSOR_ERROR(314),
-  PATH_ILLEGAL(315),
-  LOAD_FILE_ERROR(316),
-  EXECUTE_STATEMENT_ERROR(400),
-  SQL_PARSE_ERROR(401),
-  GENERATE_TIME_ZONE_ERROR(402),
-  SET_TIME_ZONE_ERROR(403),
-  NOT_STORAGE_GROUP_ERROR(404),
-  QUERY_NOT_ALLOWED(405),
-  AST_FORMAT_ERROR(406),
-  LOGICAL_OPERATOR_ERROR(407),
-  LOGICAL_OPTIMIZE_ERROR(408),
-  UNSUPPORTED_FILL_TYPE_ERROR(409),
-  PATH_ERROR(410),
-  QUERY_PROCESS_ERROR(411),
-  WRITE_PROCESS_ERROR(412),
-  INTERNAL_SERVER_ERROR(500),
-  CLOSE_OPERATION_ERROR(501),
-  READ_ONLY_SYSTEM_ERROR(502),
-  DISK_SPACE_INSUFFICIENT_ERROR(503),
-  START_UP_ERROR(504),
-  WRONG_LOGIN_PASSWORD_ERROR(600),
-  NOT_LOGIN_ERROR(601),
-  NO_PERMISSION_ERROR(602),
-  UNINITIALIZED_AUTH_ERROR(603)
\ No newline at end of file
+```
+
+With Status Code, instead of writing codes like `if 
(e.getErrorMessage().contains("exist"))`, we can simply use `e.getErrorCode() 
== TSStatusCode.TIME_SERIES_NOT_EXIST_ERROR.getStatusCode()`.
+
+Here is a list of Status Code and related message:
+
+|Status Code|Status Type|Meanings|
+|:---|:---|:---|
+|200|SUCCESS_STATUS||
+|201|STILL_EXECUTING_STATUS||
+|202|INVALID_HANDLE_STATUS||
+|203|INCOMPATIBLE_VERSION|Incompatible version|
+|298|NODE_DELETE_FAILED_ERROR|Failed while deleting node|
+|299|ALIAS_ALREADY_EXIST_ERROR|Alias already exists|
+|300|TIMESERIES_ALREADY_EXIST_ERROR|Timeseries already exists|
+|301|TIMESERIES_NOT_EXIST_ERROR|Timeseries does not exist|
+|302|UNSUPPORTED_FETCH_METADATA_OPERATION_ERROR|Unsupported fetch metadata 
operation|
+|303|METADATA_ERROR|Meet error when dealing with metadata|
+|305|OUT_OF_TTL_ERROR|Insertion time is less than TTL time bound|
+|306|CONFIG_ADJUSTER|IoTDB system load is too large|
+|307|MERGE_ERROR|Meet error while merging|
+|308|SYSTEM_CHECK_ERROR|Meet error while system checking|
+|309|SYNC_DEVICE_OWNER_CONFLICT_ERROR|Sync device owners conflict|
+|310|SYNC_CONNECTION_EXCEPTION|Meet error while sync connecting|
+|311|STORAGE_GROUP_PROCESSOR_ERROR|Storage group processor related error|
+|312|STORAGE_GROUP_ERROR|Storage group related error|
+|313|STORAGE_ENGINE_ERROR|Storage engine related error|
+|314|TSFILE_PROCESSOR_ERROR|TsFile processor related error|
+|315|PATH_ILLEGAL|Illegal path|
+|316|LOAD_FILE_ERROR|Meet error while loading file|
+|400|EXECUTE_STATEMENT_ERROR|Execute statement error|
+|401|SQL_PARSE_ERROR|Meet error while parsing SQL|
+|402|GENERATE_TIME_ZONE_ERROR|Meet error while generating time zone|
+|403|SET_TIME_ZONE_ERROR|Meet error while setting time zone|
+|404|NOT_STORAGE_GROUP_ERROR|Operating object is not a storage group|
+|405|QUERY_NOT_ALLOWED|Query statements are not allowed error|
+|406|AST_FORMAT_ERROR|AST format related error|
+|407|LOGICAL_OPERATOR_ERROR|Logical operator related error|
+|408|LOGICAL_OPTIMIZE_ERROR|Logical optimize related error|
+|409|UNSUPPORTED_FILL_TYPE_ERROR|Unsupported fill type related error|
+|410|PATH_ERROR|Path related error|
+|411|QUERY_PROCESS_ERROR|Query process related error|
+|412|WRITE_PROCESS_ERROR|Writing data related error|
+|500|INTERNAL_SERVER_ERROR|Internal server error|
+|501|CLOSE_OPERATION_ERROR|Meet error in close operation|
+|502|READ_ONLY_SYSTEM_ERROR|Operating system is read only|
+|503|DISK_SPACE_INSUFFICIENT_ERROR|Disk space is insufficient|
+|504|START_UP_ERROR|Meet error while starting up|
+|600|WRONG_LOGIN_PASSWORD_ERROR|Username or password is wrong|
+|601|NOT_LOGIN_ERROR|Has not logged in|
+|602|NO_PERMISSION_ERROR|No permissions for this operation|
+|603|UNINITIALIZED_AUTH_ERROR|Uninitialized authorizer|
+
+> All exceptions are refactored in the latest version by extracting uniform 
messages into exception classes. Different error codes are added to all 
exceptions. When an exception is caught and a higher-level exception is thrown, 
the error code is preserved and passed along so that users will know the 
detailed error reason.
+A base exception class "ProcessException" is also added to be extended by all 
exceptions.
diff --git a/docs/UserGuide/7-Ecosystem Integration/2-MapReduce TsFile.md 
b/docs/UserGuide/7-Ecosystem Integration/2-MapReduce TsFile.md
index 8153385..4d825be 100644
--- a/docs/UserGuide/7-Ecosystem Integration/2-MapReduce TsFile.md      
+++ b/docs/UserGuide/7-Ecosystem Integration/2-MapReduce TsFile.md      
@@ -47,7 +47,7 @@ With this connector, you can
 
 |Hadoop Version | Java Version | TsFile Version|
 |-------------  | ------------ |------------ |
-| `2.7.3`       | `1.8`        | `0.8.0`|
+| `2.7.3`       | `1.8`        | `0.10.0`|
 
 > Note: For more information about how to download and use TsFile, please see 
 > the following link: 
 > https://github.com/apache/incubator-iotdb/tree/master/tsfile.
 
diff --git a/docs/UserGuide/8-Architecture/3-Shared Nothing Cluster.md 
b/docs/UserGuide/8-Architecture/3-Shared Nothing Cluster.md
index 2ec0348..238af74 100644
--- a/docs/UserGuide/8-Architecture/3-Shared Nothing Cluster.md 
+++ b/docs/UserGuide/8-Architecture/3-Shared Nothing Cluster.md 
@@ -21,4 +21,4 @@
 
 # Shared Nothing Architecture
 
-Shared Nothing Architecture is under development and is expected to be 
released by the end of 2019. Please wait patiently and look forward to it.
\ No newline at end of file
+Shared Nothing Architecture is under development. Please wait patiently and 
look forward to it.
\ No newline at end of file
diff --git a/docs/zh/SystemDesign/1-TsFile/2-Format.md 
b/docs/zh/SystemDesign/1-TsFile/2-Format.md
index 4474cf1..803034d 100644
--- a/docs/zh/SystemDesign/1-TsFile/2-Format.md
+++ b/docs/zh/SystemDesign/1-TsFile/2-Format.md
@@ -390,83 +390,224 @@ Linux or MacOs:
 
 - 注意: 如果没有设置输出文件的存储路径, 将使用 "TsFile_sketch_view.txt" 做为默认值。 
 
-在Windows系统中的示例:
+在mac系统中的示例:
 
-```bat
-D:\incubator-iotdb\server\target\iotdb-server-0.10.0\tools\tsfileToolSet>.\print-tsfile-sketch.bat
 D:\data\data\sequence\root.vehicle\1572496142067-101-0.tsfile
+```$xslt
+/incubator-iotdb/server/target/iotdb-server-0.10.0/tools/tsfileToolSet$ 
./print-tsfile-sketch.sh test.tsfile
 ​````````````````````````
 Starting Printing the TsFile Sketch
 ​````````````````````````
-TsFile path:D:\data\data\sequence\root.vehicle\1572496142067-101-0.tsfile
+TsFile path:test.tsfile
 Sketch save path:TsFile_sketch_view.txt
 -------------------------------- TsFile Sketch --------------------------------
-file path: D:\data\data\sequence\root.vehicle\1572496142067-101-0.tsfile
-file length: 187382
-
-            POSITION|   CONTENT
-            --------    -------
-                   0|   [magic head] TsFile
-                   6|   [version number] 000001
-|||||||||||||||||||||   [Chunk Group] of root.vehicle.d0 begins at pos 12, 
ends at pos 186469, version:102, num of Chunks:6
-                  12|   [Chunk] of s3, numOfPoints:10600, time 
range:[3000,13599], tsDataType:TEXT,
-                        
TsDigest:[min_value:A,max_value:E,first_value:A,last_value:E,sum_value:0.0]
-                    |           [marker] 1
-                    |           [ChunkHeader]
-                    |           11 pages
-               55718|   [Chunk] of s4, numOfPoints:10600, time 
range:[3000,13599], tsDataType:BOOLEAN,
-                        
TsDigest:[min_value:false,max_value:true,first_value:true,last_value:false,sum_value:0.0]
-                    |           [marker] 1
-                    |           [ChunkHeader]
-                    |           11 pages
-               68848|   [Chunk] of s5, numOfPoints:10600, time 
range:[3000,13599], tsDataType:DOUBLE,
-                        
TsDigest:[min_value:3000.0,max_value:13599.0,first_value:3000.0,last_value:13599.0,sum_value:8.79747E7]
-                    |           [marker] 1
-                    |           [ChunkHeader]
-                    |           11 pages
-               98474|   [Chunk] of s0, numOfPoints:21900, time 
range:[3000,100999], tsDataType:INT32,
-                        
TsDigest:[min_value:0,max_value:99,first_value:0,last_value:19,sum_value:889750.0]
-                    |           [marker] 1
-                    |           [ChunkHeader]
-                    |           22 pages
-              123369|   [Chunk] of s1, numOfPoints:21900, time 
range:[3000,100999], tsDataType:INT64,
-                        
TsDigest:[min_value:0,max_value:39,first_value:8,last_value:19,sum_value:300386.0]
-                    |           [marker] 1
-                    |           [ChunkHeader]
-                    |           22 pages
-              144741|   [Chunk] of s2, numOfPoints:21900, time 
range:[3000,100999], tsDataType:FLOAT,
-                        
TsDigest:[min_value:0.0,max_value:122.0,first_value:8.0,last_value:52.0,sum_value:778581.0]
-                    |           [marker] 1
-                    |           [ChunkHeader]
-                    |           22 pages
-              186437|   [Chunk Group Footer]
-                    |           [marker] 0
-                    |           [deviceID] root.vehicle.d0
-                    |           [dataSize] 186425
-                    |           [num of chunks] 6
-|||||||||||||||||||||   [Chunk Group] of root.vehicle.d0 ends
-              186469|   [marker] 2
-              186470|   [TsDeviceMetadata] of root.vehicle.d0, startTime:3000, 
endTime:100999
-                    |           [startTime] 3000tfi
-                    |           [endTime] 100999
-                    |           [num of ChunkGroupMetaData] 1
-                    |           1 ChunkGroupMetaData
-              187133|   [TsFileMetaData]
-                    |           [num of devices] 1
-                    |           1 key&TsDeviceMetadataIndex
-                    |           [num of measurements] 6
-                    |           6 key&measurementSchema
-                    |           [createBy isNotNull] false
-                    |           [totalChunkNum] 6
-                    |           [invalidChunkNum] 0
-                    |           [bloom filter bit vector byte array length] 31
-                    |           [bloom filter bit vector byte array]
-                    |           [bloom filter number of bits] 256
-                    |           [bloom filter number of hash functions] 5
-              187372|   [TsFileMetaDataSize] 239
-              187376|   [magic tail] TsFile
-              187382|   END of TsFile
+file path: test.tsfile
+file length: 33436
+
+            POSITION| CONTENT
+            --------  -------
+                   0| [magic head] TsFile
+                   6| [version number] 000002
+||||||||||||||||||||| [Chunk Group] of root.group_12.d2, num of Chunks:3
+                  12| [Chunk] of s_INT64e_RLE, numOfPoints:10000, time 
range:[1,10000], tsDataType:INT64, 
+                      startTime: 1 endTime: 10000 count: 10000 
[minValue:1,maxValue:1,firstValue:1,lastValue:1,sumValue:10000.0]
+                    |   [marker] 1
+                    |   [ChunkHeader]
+                    |   2 pages
+                 677| [Chunk] of s_INT64e_TS_2DIFF, numOfPoints:10000, time 
range:[1,10000], tsDataType:INT64, 
+                      startTime: 1 endTime: 10000 count: 10000 
[minValue:1,maxValue:1,firstValue:1,lastValue:1,sumValue:10000.0]
+                    |   [marker] 1
+                    |   [ChunkHeader]
+                    |   1 pages
+                1349| [Chunk] of s_INT64e_PLAIN, numOfPoints:10000, time 
range:[1,10000], tsDataType:INT64, 
+                      startTime: 1 endTime: 10000 count: 10000 
[minValue:1,maxValue:1,firstValue:1,lastValue:1,sumValue:10000.0]
+                    |   [marker] 1
+                    |   [ChunkHeader]
+                    |   2 pages
+                5766| [Chunk Group Footer]
+                    |   [marker] 0
+                    |   [deviceID] root.group_12.d2
+                    |   [dataSize] 5754
+                    |   [num of chunks] 3
+||||||||||||||||||||| [Chunk Group] of root.group_12.d2 ends
+                5799| [Version Info]
+                    |   [marker] 3
+                    |   [version] 102
+||||||||||||||||||||| [Chunk Group] of root.group_12.d1, num of Chunks:3
+                5808| [Chunk] of s_INT32e_PLAIN, numOfPoints:10000, time 
range:[1,10000], tsDataType:INT32, 
+                      startTime: 1 endTime: 10000 count: 10000 
[minValue:1,maxValue:1,firstValue:1,lastValue:1,sumValue:10000.0]
+                    |   [marker] 1
+                    |   [ChunkHeader]
+                    |   1 pages
+                8231| [Chunk] of s_INT32e_TS_2DIFF, numOfPoints:10000, time 
range:[1,10000], tsDataType:INT32, 
+                      startTime: 1 endTime: 10000 count: 10000 
[minValue:1,maxValue:1,firstValue:1,lastValue:1,sumValue:10000.0]
+                    |   [marker] 1
+                    |   [ChunkHeader]
+                    |   1 pages
+                8852| [Chunk] of s_INT32e_RLE, numOfPoints:10000, time 
range:[1,10000], tsDataType:INT32, 
+                      startTime: 1 endTime: 10000 count: 10000 
[minValue:1,maxValue:1,firstValue:1,lastValue:1,sumValue:10000.0]
+                    |   [marker] 1
+                    |   [ChunkHeader]
+                    |   1 pages
+                9399| [Chunk Group Footer]
+                    |   [marker] 0
+                    |   [deviceID] root.group_12.d1
+                    |   [dataSize] 3591
+                    |   [num of chunks] 3
+||||||||||||||||||||| [Chunk Group] of root.group_12.d1 ends
+                9432| [Version Info]
+                    |   [marker] 3
+                    |   [version] 102
+||||||||||||||||||||| [Chunk Group] of root.group_12.d0, num of Chunks:2
+                9441| [Chunk] of s_BOOLEANe_RLE, numOfPoints:10000, time 
range:[1,10000], tsDataType:BOOLEAN, 
+                      startTime: 1 endTime: 10000 count: 10000 
[firstValue:true,lastValue:true]
+                    |   [marker] 1
+                    |   [ChunkHeader]
+                    |   1 pages
+                9968| [Chunk] of s_BOOLEANe_PLAIN, numOfPoints:10000, time 
range:[1,10000], tsDataType:BOOLEAN, 
+                      startTime: 1 endTime: 10000 count: 10000 
[firstValue:true,lastValue:true]
+                    |   [marker] 1
+                    |   [ChunkHeader]
+                    |   1 pages
+               10961| [Chunk Group Footer]
+                    |   [marker] 0
+                    |   [deviceID] root.group_12.d0
+                    |   [dataSize] 1520
+                    |   [num of chunks] 2
+||||||||||||||||||||| [Chunk Group] of root.group_12.d0 ends
+               10994| [Version Info]
+                    |   [marker] 3
+                    |   [version] 102
+||||||||||||||||||||| [Chunk Group] of root.group_12.d5, num of Chunks:1
+               11003| [Chunk] of s_TEXTe_PLAIN, numOfPoints:10000, time 
range:[1,10000], tsDataType:TEXT, 
+                      startTime: 1 endTime: 10000 count: 10000 
[firstValue:version_test,lastValue:version_test]
+                    |   [marker] 1
+                    |   [ChunkHeader]
+                    |   3 pages
+               19278| [Chunk Group Footer]
+                    |   [marker] 0
+                    |   [deviceID] root.group_12.d5
+                    |   [dataSize] 8275
+                    |   [num of chunks] 1
+||||||||||||||||||||| [Chunk Group] of root.group_12.d5 ends
+               19311| [Version Info]
+                    |   [marker] 3
+                    |   [version] 102
+||||||||||||||||||||| [Chunk Group] of root.group_12.d4, num of Chunks:4
+               19320| [Chunk] of s_DOUBLEe_PLAIN, numOfPoints:10000, time 
range:[1,10000], tsDataType:DOUBLE, 
+                      startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.00000000123]
+                    |   [marker] 1
+                    |   [ChunkHeader]
+                    |   2 pages
+               23740| [Chunk] of s_DOUBLEe_TS_2DIFF, numOfPoints:10000, time 
range:[1,10000], tsDataType:DOUBLE, 
+                      startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.000000002045]
+                    |   [marker] 1
+                    |   [ChunkHeader]
+                    |   1 pages
+               24414| [Chunk] of s_DOUBLEe_GORILLA, numOfPoints:10000, time 
range:[1,10000], tsDataType:DOUBLE, 
+                      startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.000000002045]
+                    |   [marker] 1
+                    |   [ChunkHeader]
+                    |   1 pages
+               25054| [Chunk] of s_DOUBLEe_RLE, numOfPoints:10000, time 
range:[1,10000], tsDataType:DOUBLE, 
+                      startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.000000001224]
+                    |   [marker] 1
+                    |   [ChunkHeader]
+                    |   2 pages
+               25717| [Chunk Group Footer]
+                    |   [marker] 0
+                    |   [deviceID] root.group_12.d4
+                    |   [dataSize] 6397
+                    |   [num of chunks] 4
+||||||||||||||||||||| [Chunk Group] of root.group_12.d4 ends
+               25750| [Version Info]
+                    |   [marker] 3
+                    |   [version] 102
+||||||||||||||||||||| [Chunk Group] of root.group_12.d3, num of Chunks:4
+               25759| [Chunk] of s_FLOATe_GORILLA, numOfPoints:10000, time 
range:[1,10000], tsDataType:FLOAT, 
+                      startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.00023841858]
+                    |   [marker] 1
+                    |   [ChunkHeader]
+                    |   1 pages
+               26375| [Chunk] of s_FLOATe_PLAIN, numOfPoints:10000, time 
range:[1,10000], tsDataType:FLOAT, 
+                      startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.00023841858]
+                    |   [marker] 1
+                    |   [ChunkHeader]
+                    |   1 pages
+               28796| [Chunk] of s_FLOATe_RLE, numOfPoints:10000, time 
range:[1,10000], tsDataType:FLOAT, 
+                      startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.00023841858]
+                    |   [marker] 1
+                    |   [ChunkHeader]
+                    |   1 pages
+               29343| [Chunk] of s_FLOATe_TS_2DIFF, numOfPoints:10000, time 
range:[1,10000], tsDataType:FLOAT, 
+                      startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.00023841858]
+                    |   [marker] 1
+                    |   [ChunkHeader]
+                    |   1 pages
+               29967| [Chunk Group Footer]
+                    |   [marker] 0
+                    |   [deviceID] root.group_12.d3
+                    |   [dataSize] 4208
+                    |   [num of chunks] 4
+||||||||||||||||||||| [Chunk Group] of root.group_12.d3 ends
+               30000| [Version Info]
+                    |   [marker] 3
+                    |   [version] 102
+               30009| [marker] 2
+               30010| [ChunkMetadataList] of 
root.group_12.d0.s_BOOLEANe_PLAIN, tsDataType:BOOLEAN
+                    | [startTime: 1 endTime: 10000 count: 10000 
[firstValue:true,lastValue:true]] 
+               30066| [ChunkMetadataList] of root.group_12.d0.s_BOOLEANe_RLE, 
tsDataType:BOOLEAN
+                    | [startTime: 1 endTime: 10000 count: 10000 
[firstValue:true,lastValue:true]] 
+               30120| [ChunkMetadataList] of root.group_12.d1.s_INT32e_PLAIN, 
tsDataType:INT32
+                    | [startTime: 1 endTime: 10000 count: 10000 
[minValue:1,maxValue:1,firstValue:1,lastValue:1,sumValue:10000.0]] 
+               30196| [ChunkMetadataList] of root.group_12.d1.s_INT32e_RLE, 
tsDataType:INT32
+                    | [startTime: 1 endTime: 10000 count: 10000 
[minValue:1,maxValue:1,firstValue:1,lastValue:1,sumValue:10000.0]] 
+               30270| [ChunkMetadataList] of 
root.group_12.d1.s_INT32e_TS_2DIFF, tsDataType:INT32
+                    | [startTime: 1 endTime: 10000 count: 10000 
[minValue:1,maxValue:1,firstValue:1,lastValue:1,sumValue:10000.0]] 
+               30349| [ChunkMetadataList] of root.group_12.d2.s_INT64e_PLAIN, 
tsDataType:INT64
+                    | [startTime: 1 endTime: 10000 count: 10000 
[minValue:1,maxValue:1,firstValue:1,lastValue:1,sumValue:10000.0]] 
+               30441| [ChunkMetadataList] of root.group_12.d2.s_INT64e_RLE, 
tsDataType:INT64
+                    | [startTime: 1 endTime: 10000 count: 10000 
[minValue:1,maxValue:1,firstValue:1,lastValue:1,sumValue:10000.0]] 
+               30531| [ChunkMetadataList] of 
root.group_12.d2.s_INT64e_TS_2DIFF, tsDataType:INT64
+                    | [startTime: 1 endTime: 10000 count: 10000 
[minValue:1,maxValue:1,firstValue:1,lastValue:1,sumValue:10000.0]] 
+               30626| [ChunkMetadataList] of 
root.group_12.d3.s_FLOATe_GORILLA, tsDataType:FLOAT
+                    | [startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.00023841858]]
 
+               30704| [ChunkMetadataList] of root.group_12.d3.s_FLOATe_PLAIN, 
tsDataType:FLOAT
+                    | [startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.00023841858]]
 
+               30780| [ChunkMetadataList] of root.group_12.d3.s_FLOATe_RLE, 
tsDataType:FLOAT
+                    | [startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.00023841858]]
 
+               30854| [ChunkMetadataList] of 
root.group_12.d3.s_FLOATe_TS_2DIFF, tsDataType:FLOAT
+                    | [startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.00023841858]]
 
+               30933| [ChunkMetadataList] of 
root.group_12.d4.s_DOUBLEe_GORILLA, tsDataType:DOUBLE
+                    | [startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.000000002045]]
 
+               31028| [ChunkMetadataList] of root.group_12.d4.s_DOUBLEe_PLAIN, 
tsDataType:DOUBLE
+                    | [startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.00000000123]]
 
+               31121| [ChunkMetadataList] of root.group_12.d4.s_DOUBLEe_RLE, 
tsDataType:DOUBLE
+                    | [startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.000000001224]]
 
+               31212| [ChunkMetadataList] of 
root.group_12.d4.s_DOUBLEe_TS_2DIFF, tsDataType:DOUBLE
+                    | [startTime: 1 endTime: 10000 count: 10000 
[minValue:1.1,maxValue:1.1,firstValue:1.1,lastValue:1.1,sumValue:11000.000000002045]]
 
+               31308| [ChunkMetadataList] of root.group_12.d5.s_TEXTe_PLAIN, 
tsDataType:TEXT
+                    | [startTime: 1 endTime: 10000 count: 10000 
[firstValue:version_test,lastValue:version_test]] 
+               32840| [MetadataIndex] of root.group_12.d0
+               32881| [MetadataIndex] of root.group_12.d1
+               32920| [MetadataIndex] of root.group_12.d2
+               32959| [MetadataIndex] of root.group_12.d3
+               33000| [MetadataIndex] of root.group_12.d4
+               33042| [MetadataIndex] of root.group_12.d5
+               33080| [TsFileMetadata]
+                    |   [num of devices] 6
+                    |   6 key&TsMetadataIndex
+                    |   [totalChunkNum] 17
+                    |   [invalidChunkNum] 0
+                    |   [bloom filter bit vector byte array length] 32
+                    |   [bloom filter bit vector byte array] 
+                    |   [bloom filter number of bits] 256
+                    |   [bloom filter number of hash functions] 5
+               33426| [TsFileMetadataSize] 346
+               33430| [magic tail] TsFile
+               33436| END of TsFile
 
 ---------------------------------- TsFile Sketch End 
----------------------------------
+
 ````````````````````````
 
 #### 1.3.4 TsFileSequenceRead
diff --git a/docs/zh/UserGuide/1-Overview/2-Architecture.md 
b/docs/zh/UserGuide/1-Overview/2-Architecture.md
index 00b7868..edbdbe4 100644
--- a/docs/zh/UserGuide/1-Overview/2-Architecture.md
+++ b/docs/zh/UserGuide/1-Overview/2-Architecture.md
@@ -29,6 +29,10 @@ IoTDB套件由若干个组件构成,共同形成“数据收集-数据写入-
 
 
在图1.1中,用户可以通过JDBC将来自设备上传感器采集的时序数据、服务器负载和CPU内存等系统状态数据、消息队列中的时序数据、应用程序的时序数据或者其他数据库中的时序数据导入到本地或者远程的IoTDB中。用户还可以将上述数据直接写成本地(或位于HDFS上)的TsFile文件。
 
-对于写入到IoTDB的数据以及本地的TsFile文件,可以通过同步工具TsFileSync将数据文件同步到HDFS上,进而实现在Hadoop或Spark的数据处理平台上的诸如异常检测、机器学习等数据处理任务。对于分析的结果,可以写回成TsFile文件。
+对于写入到IoTDB的数据以及本地的TsFile文件,可以通过同步工具TsFileSync将数据文件同步到HDFS上,进而实现在Hadoop或Spark的数据处理平台上的诸如异常检测、机器学习等数据处理任务。
+
+对于写入到HDFS或者本地的TsFile文件,可以利用TsFile-Hadoop或TsFile-Spark连接器允许Hadoop或Spark进行数据处理。
+
+对于分析的结果,可以写回成TsFile文件。
 
 IoTDB和TsFile还提供了相应的客户端工具,满足用户查看和写入数据的SQL形式、脚本形式和图形化形式等多种需求。
diff --git a/docs/zh/UserGuide/1-Overview/3-Scenario.md 
b/docs/zh/UserGuide/1-Overview/3-Scenario.md
index 52bd644..4564d55 100644
--- a/docs/zh/UserGuide/1-Overview/3-Scenario.md
+++ b/docs/zh/UserGuide/1-Overview/3-Scenario.md
@@ -31,9 +31,9 @@
 
 <img style="width:100%; max-width:800px; max-height:600px; margin-left:auto; 
margin-right:auto; display:block;" 
src="https://user-images.githubusercontent.com/13203019/51579014-695ef980-1efa-11e9-8cbc-e9e7ee4fa0d8.png">
 
-在场景1中,仅需要TsFile、TsFileSync部署在一台PC上,此外还需要Hadoop/Spark集群。其示意图如图1.2所示。图1.3展示了此时的应用架构。
+在场景1中,仅需要TsFile、TsFileSync部署在一台PC上,此外还需要部署Hadoop/Spark连接器用于数据中心端Hadoop/Spark集群的数据存储和分析。其示意图如图1.2所示。图1.3展示了此时的应用架构。
 
-<img style="width:100%; max-width:800px; max-height:600px; margin-left:auto; 
margin-right:auto; display:block;" 
src="https://user-images.githubusercontent.com/13203019/51579026-77ad1580-1efa-11e9-8345-564b22d70286.jpg">
+<img style="width:100%; max-width:800px; max-height:600px; margin-left:auto; 
margin-right:auto; display:block;" 
src="https://user-images.githubusercontent.com/25913899/81768490-bf034f00-950d-11ea-9b56-fef3edca0958.png">
 
 ## 场景2
 
@@ -41,7 +41,7 @@
 
 为了保证发电机的正常运转并对发电机及时监控和分析,公司需要收集这些传感器信息,在发电机工作环境中进行部分计算和分析,还需要将收集的原始信息上传到数据中心。
 
-此时可以采用IoTDB套件中的IoTDB、TsFileSync工具和Hadoop/Spark集成组件等。需要部署一个场控PC机,其上安装IoTDB和TsFileSync工具,用于支持读写数据、本地计算和分析以及上传数据到数据中心。此外还需要部署Hadoop/Spark集群用于数据中心端的数据存储和分析。如图1.4所示。
+此时可以采用IoTDB套件中的IoTDB、TsFileSync工具和Hadoop/Spark集成组件等。需要部署一个场控PC机,其上安装IoTDB和TsFileSync工具,用于支持读写数据、本地计算和分析以及上传数据到数据中心。此外还需要部署Hadoop/Spark连接器用于数据中心端Hadoop/Spark集群的数据存储和分析。如图1.4所示。
 
 <img style="width:100%; max-width:800px; max-height:600px; margin-left:auto; 
margin-right:auto; display:block;" 
src="https://user-images.githubusercontent.com/13203019/51579033-7ed42380-1efa-11e9-889f-fb4180291a9e.png">
 
@@ -55,13 +55,13 @@
 
 
为了保证机械手的监控数据能够及时监控和分析,公司需要收集这些机械手传感器信息,将其发送至可以连接外部网络的服务器上,而后将原始数据信息上传到数据中心进行复杂的计算和分析。
 
-此时,可以采用IoTDB套件中的IoTDB、IoTDB-Client工具、TsFileSync工具和Hadoop/Spark集成组件等。将IoTDB服务器安装在工厂连接外网的服务器上,用户接收机械手传输的数据并将数据上传到数据中心。将IoTDB-Client工具安装在每一个连接工厂内网的机械手上,用于将传感器产生的实时数据上传到工厂内部服务器。再使用TsFileSync工具将原始数据上传到数据中心。此外还需要部署Hadoop/Spark集群用于数据中心端的数据存储和分析。如图1.6中间场景所示。
+此时,可以采用IoTDB套件中的IoTDB、IoTDB-Client工具、TsFileSync工具和Hadoop/Spark集成组件等。将IoTDB服务器安装在工厂连接外网的服务器上,用于接收机械手传输的数据并将数据上传到数据中心。将IoTDB-Client工具安装在每一个连接工厂内网的机械手上,用于将传感器产生的实时数据上传到工厂内部服务器。再使用TsFileSync工具将原始数据上传到数据中心。此外还需要部署Hadoop/Spark连接器用于数据中心端Hadoop/Spark集群的数据存储和分析。如图1.6中间场景所示。
 
 <img style="width:100%; max-width:800px; max-height:600px; margin-left:auto; 
margin-right:auto; display:block;" 
src="https://user-images.githubusercontent.com/13203019/51579080-96aba780-1efa-11e9-87ac-940c45b19dd7.jpg">
 
 图1.7给出了此时的应用架构。
 
-<img style="width:100%; max-width:800px; max-height:600px; margin-left:auto; 
margin-right:auto; display:block;" 
src="https://user-images.githubusercontent.com/13203019/51579085-9dd2b580-1efa-11e9-97b9-f56bc8d342b0.jpg">
+<img style="width:100%; max-width:800px; max-height:600px; margin-left:auto; 
margin-right:auto; display:block;" 
src="https://user-images.githubusercontent.com/25913899/81768477-b874d780-950d-11ea-80ca-8807b9bd0970.png">
 
 ## 场景4
 
diff --git a/docs/zh/UserGuide/4-Client/3-Programming - JDBC.md 
b/docs/zh/UserGuide/4-Client/3-Programming - JDBC.md
index 7da7eb2..c1cccee 100644
--- a/docs/zh/UserGuide/4-Client/3-Programming - JDBC.md        
+++ b/docs/zh/UserGuide/4-Client/3-Programming - JDBC.md        
@@ -207,77 +207,3 @@ public class JDBCExample {
   }
 }
 ```
-
-
-## Status Code
-
-在最新版本中引入了**状态码**这一概念。例如,因为IoTDB需要在写入数据之前首先注册时间序列,一种可能的解决方案是:
-
-```
-try {
-    writeData();
-} catch (SQLException e) {
-  // the most case is that the time series does not exist
-  if (e.getMessage().contains("exist")) {
-      //However, using the content of the error message is not so efficient
-      registerTimeSeries();
-      //write data once again
-      writeData();
-  }
-}
-
-```
-
-利用状态码,我们就可以不必写诸如`if 
(e.getErrorMessage().contains("exist"))`的代码,只需要使用`e.getStatusType().getCode() 
== TSStatusCode.TIME_SERIES_NOT_EXIST_ERROR.getStatusCode()`。
-
-这里是状态码和相对应信息的列表:
-
-|状态码|状态类型|状态信息|
-|:---|:---|:---|
-|200|SUCCESS_STATUS||
-|201|STILL_EXECUTING_STATUS||
-|202|INVALID_HANDLE_STATUS||
-|203|INCOMPATIBLE_VERSION|版本不兼容|
-|298|NODE_DELETE_FAILED_ERROR|删除节点失败|
-|299|ALIAS_ALREADY_EXIST_ERROR|路径别名已经存在|
-|300|TIMESERIES_ALREADY_EXIST_ERROR|时间序列已经存在|
-|301|TIMESERIES_NOT_EXIST_ERROR|时间序列不存在|
-|302|UNSUPPORTED_FETCH_METADATA_OPERATION_ERROR|不支持的获取元数据操作|
-|303|METADATA_ERROR|处理元数据错误|
-|305|OUT_OF_TTL_ERROR|插入时间少于TTL时间边界|
-|306|CONFIG_ADJUSTER|IoTDB系统负载过大|
-|307|MERGE_ERROR|合并错误|
-|308|SYSTEM_CHECK_ERROR|系统检查错误|
-|309|SYNC_DEVICE_OWNER_CONFLICT_ERROR|回传设备冲突错误|
-|310|SYNC_CONNECTION_EXCEPTION|回传连接错误|
-|311|STORAGE_GROUP_PROCESSOR_ERROR|存储组处理器相关错误|
-|312|STORAGE_GROUP_ERROR|存储组相关错误|
-|313|STORAGE_ENGINE_ERROR|存储引擎相关错误|
-|314|TSFILE_PROCESSOR_ERROR|TsFile处理器相关错误|
-|315|PATH_ILLEGAL|路径不合法|
-|316|LOAD_FILE_ERROR|加载文件错误|
-|400|EXECUTE_STATEMENT_ERROR|执行语句错误|
-|401|SQL_PARSE_ERROR|SQL语句分析错误|
-|402|GENERATE_TIME_ZONE_ERROR|生成时区错误|
-|403|SET_TIME_ZONE_ERROR|设置时区错误|
-|404|NOT_STORAGE_GROUP_ERROR|操作对象不是存储组|
-|405|QUERY_NOT_ALLOWED|查询语句不允许|
-|406|AST_FORMAT_ERROR|AST格式相关错误|
-|407|LOGICAL_OPERATOR_ERROR|逻辑符相关错误|
-|408|LOGICAL_OPTIMIZE_ERROR|逻辑优化相关错误|
-|409|UNSUPPORTED_FILL_TYPE_ERROR|不支持的填充类型|
-|410|PATH_ERROR|路径相关错误|
-|411|QUERY_PROCESS_ERROR|查询处理相关错误|
-|412|WRITE_PROCESS_ERROR|写入相关错误|
-|500|INTERNAL_SERVER_ERROR|服务器内部错误|
-|501|CLOSE_OPERATION_ERROR|关闭操作错误|
-|502|READ_ONLY_SYSTEM_ERROR|系统只读|
-|503|DISK_SPACE_INSUFFICIENT_ERROR|磁盘空间不足|
-|504|START_UP_ERROR|启动错误|
-|600|WRONG_LOGIN_PASSWORD_ERROR|用户名或密码错误|
-|601|NOT_LOGIN_ERROR|没有登录|
-|602|NO_PERMISSION_ERROR|没有操作权限|
-|603|UNINITIALIZED_AUTH_ERROR|授权人未初始化|
-
-> 
在最新版本中,我们重构了IoTDB的异常类。通过将错误信息统一提取到异常类中,并为所有异常添加不同的错误代码,从而当捕获到异常并引发更高级别的异常时,错误代码将保留并传递,以便用户了解详细的错误原因。
-除此之外,我们添加了一个基础异常类“ProcessException”,由所有异常扩展。
\ No newline at end of file
diff --git a/docs/zh/UserGuide/4-Client/7-Status Codes.md 
b/docs/zh/UserGuide/4-Client/7-Status Codes.md
index ff9c8c1..61c1e66 100644
--- a/docs/zh/UserGuide/4-Client/7-Status Codes.md      
+++ b/docs/zh/UserGuide/4-Client/7-Status Codes.md      
@@ -19,53 +19,75 @@
 
 -->
 
-# Status codes
+# Status Codes
 
-对于每个SQL请求,都会返回一个结果码;
-若SQL运行失败,客户端会收到错误码和一段错误消息。
-目前的结果码定义如下:
+在最新版本中引入了**状态码**这一概念。例如,因为IoTDB需要在写入数据之前首先注册时间序列,一种可能的解决方案是:
 
-  SUCCESS_STATUS(200),
-  STILL_EXECUTING_STATUS(201),
-  INVALID_HANDLE_STATUS(202),
-  INCOMPATIBLE_VERSION(203),
-  NODE_DELETE_FAILED_ERROR(298),
-  ALIAS_ALREADY_EXIST_ERROR(299),
-  TIMESERIES_ALREADY_EXIST_ERROR(300),
-  TIMESERIES_NOT_EXIST_ERROR(301),
-  UNSUPPORTED_FETCH_METADATA_OPERATION_ERROR(302),
-  METADATA_ERROR(303),
-  OUT_OF_TTL_ERROR(305),
-  CONFIG_ADJUSTER(306),
-  MERGE_ERROR(307),
-  SYSTEM_CHECK_ERROR(308),
-  SYNC_DEVICE_OWNER_CONFLICT_ERROR(309),
-  SYNC_CONNECTION_EXCEPTION(310),
-  STORAGE_GROUP_PROCESSOR_ERROR(311),
-  STORAGE_GROUP_ERROR(312),
-  STORAGE_ENGINE_ERROR(313),
-  TSFILE_PROCESSOR_ERROR(314),
-  PATH_ILLEGAL(315),
-  LOAD_FILE_ERROR(316),
-  EXECUTE_STATEMENT_ERROR(400),
-  SQL_PARSE_ERROR(401),
-  GENERATE_TIME_ZONE_ERROR(402),
-  SET_TIME_ZONE_ERROR(403),
-  NOT_STORAGE_GROUP_ERROR(404),
-  QUERY_NOT_ALLOWED(405),
-  AST_FORMAT_ERROR(406),
-  LOGICAL_OPERATOR_ERROR(407),
-  LOGICAL_OPTIMIZE_ERROR(408),
-  UNSUPPORTED_FILL_TYPE_ERROR(409),
-  PATH_ERROR(410),
-  QUERY_PROCESS_ERROR(411),
-  WRITE_PROCESS_ERROR(412),
-  INTERNAL_SERVER_ERROR(500),
-  CLOSE_OPERATION_ERROR(501),
-  READ_ONLY_SYSTEM_ERROR(502),
-  DISK_SPACE_INSUFFICIENT_ERROR(503),
-  START_UP_ERROR(504),
-  WRONG_LOGIN_PASSWORD_ERROR(600),
-  NOT_LOGIN_ERROR(601),
-  NO_PERMISSION_ERROR(602),
-  UNINITIALIZED_AUTH_ERROR(603),
\ No newline at end of file
+```
+try {
+    writeData();
+} catch (SQLException e) {
+  // the most case is that the time series does not exist
+  if (e.getMessage().contains("exist")) {
+      //However, using the content of the error message is not so efficient
+      registerTimeSeries();
+      //write data once again
+      writeData();
+  }
+}
+
+```
+
+利用状态码,我们就可以不必写诸如`if 
(e.getErrorMessage().contains("exist"))`的代码,只需要使用`e.getStatusType().getCode() 
== TSStatusCode.TIMESERIES_NOT_EXIST_ERROR.getStatusCode()`。
+
+这里是状态码和相对应信息的列表:
+
+|状态码|状态类型|状态信息|
+|:---|:---|:---|
+|200|SUCCESS_STATUS||
+|201|STILL_EXECUTING_STATUS||
+|202|INVALID_HANDLE_STATUS||
+|203|INCOMPATIBLE_VERSION|版本不兼容|
+|298|NODE_DELETE_FAILED_ERROR|删除节点失败|
+|299|ALIAS_ALREADY_EXIST_ERROR|路径别名已经存在|
+|300|TIMESERIES_ALREADY_EXIST_ERROR|时间序列已经存在|
+|301|TIMESERIES_NOT_EXIST_ERROR|时间序列不存在|
+|302|UNSUPPORTED_FETCH_METADATA_OPERATION_ERROR|不支持的获取元数据操作|
+|303|METADATA_ERROR|处理元数据错误|
+|305|OUT_OF_TTL_ERROR|插入时间少于TTL时间边界|
+|306|CONFIG_ADJUSTER|IoTDB系统负载过大|
+|307|MERGE_ERROR|合并错误|
+|308|SYSTEM_CHECK_ERROR|系统检查错误|
+|309|SYNC_DEVICE_OWNER_CONFLICT_ERROR|回传设备冲突错误|
+|310|SYNC_CONNECTION_EXCEPTION|回传连接错误|
+|311|STORAGE_GROUP_PROCESSOR_ERROR|存储组处理器相关错误|
+|312|STORAGE_GROUP_ERROR|存储组相关错误|
+|313|STORAGE_ENGINE_ERROR|存储引擎相关错误|
+|314|TSFILE_PROCESSOR_ERROR|TsFile处理器相关错误|
+|315|PATH_ILLEGAL|路径不合法|
+|316|LOAD_FILE_ERROR|加载文件错误|
+|400|EXECUTE_STATEMENT_ERROR|执行语句错误|
+|401|SQL_PARSE_ERROR|SQL语句分析错误|
+|402|GENERATE_TIME_ZONE_ERROR|生成时区错误|
+|403|SET_TIME_ZONE_ERROR|设置时区错误|
+|404|NOT_STORAGE_GROUP_ERROR|操作对象不是存储组|
+|405|QUERY_NOT_ALLOWED|查询语句不允许|
+|406|AST_FORMAT_ERROR|AST格式相关错误|
+|407|LOGICAL_OPERATOR_ERROR|逻辑符相关错误|
+|408|LOGICAL_OPTIMIZE_ERROR|逻辑优化相关错误|
+|409|UNSUPPORTED_FILL_TYPE_ERROR|不支持的填充类型|
+|410|PATH_ERROR|路径相关错误|
+|411|QUERY_PROCESS_ERROR|查询处理相关错误|
+|412|WRITE_PROCESS_ERROR|写入相关错误|
+|500|INTERNAL_SERVER_ERROR|服务器内部错误|
+|501|CLOSE_OPERATION_ERROR|关闭操作错误|
+|502|READ_ONLY_SYSTEM_ERROR|系统只读|
+|503|DISK_SPACE_INSUFFICIENT_ERROR|磁盘空间不足|
+|504|START_UP_ERROR|启动错误|
+|600|WRONG_LOGIN_PASSWORD_ERROR|用户名或密码错误|
+|601|NOT_LOGIN_ERROR|没有登录|
+|602|NO_PERMISSION_ERROR|没有操作权限|
+|603|UNINITIALIZED_AUTH_ERROR|授权人未初始化|
+
+> 
在最新版本中,我们重构了IoTDB的异常类。通过将错误信息统一提取到异常类中,并为所有异常添加不同的错误代码,从而当捕获到异常并引发更高级别的异常时,错误代码将保留并传递,以便用户了解详细的错误原因。
+除此之外,我们添加了一个基础异常类“ProcessException”,由所有异常扩展。
\ No newline at end of file
diff --git 
a/server/src/main/java/org/apache/iotdb/db/tools/TsFileSketchTool.java 
b/server/src/main/java/org/apache/iotdb/db/tools/TsFileSketchTool.java
index 840fa73..7a2c6c6 100644
--- a/server/src/main/java/org/apache/iotdb/db/tools/TsFileSketchTool.java
+++ b/server/src/main/java/org/apache/iotdb/db/tools/TsFileSketchTool.java
@@ -23,6 +23,7 @@ import java.io.FileWriter;
 import java.io.IOException;
 import java.io.PrintWriter;
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import java.util.TreeMap;
@@ -44,14 +45,9 @@ import org.apache.iotdb.tsfile.utils.Pair;
 public class TsFileSketchTool {
 
   public static void main(String[] args) throws IOException {
-    String filename = "test.tsfile";
-    String outFile = "TsFile_sketch_view.txt";
-    if (args.length == 1) {
-      filename = args[0];
-    } else if (args.length == 2) {
-      filename = args[0];
-      outFile = args[1];
-    }
+    Pair<String, String> fileNames = checkArgs(args);
+    String filename = fileNames.left;
+    String outFile = fileNames.right;
     System.out.println("TsFile path:" + filename);
     System.out.println("Sketch save path:" + outFile);
     try (PrintWriter pw = new PrintWriter(new FileWriter(outFile))) {
@@ -67,6 +63,10 @@ public class TsFileSketchTool {
       List<ChunkGroupMetadata> allChunkGroupMetadata = new ArrayList<>();
       List<Pair<Long, Long>> versionInfo = new ArrayList<>();
       reader.selfCheck(null, allChunkGroupMetadata, versionInfo, false);
+      Map<Long, Long> versionMap = new HashMap<>();
+      for (Pair<Long, Long> versionPair : versionInfo) {
+        versionMap.put(versionPair.left - Long.BYTES - 1, versionPair.right);
+      }
 
       // begin print
       StringBuilder str1 = new StringBuilder();
@@ -83,8 +83,7 @@ public class TsFileSketchTool {
               + "|\t[version number] "
               + reader.readVersionNumber());
       // ChunkGroup begins
-      for (int i = 0; i < allChunkGroupMetadata.size(); i++) {
-        ChunkGroupMetadata chunkGroupMetadata = allChunkGroupMetadata.get(i);
+      for (ChunkGroupMetadata chunkGroupMetadata : allChunkGroupMetadata) {
         printlnBoth(pw, str1.toString() + "\t[Chunk Group] of " + 
chunkGroupMetadata.getDevice() +
             ", num of Chunks:" + 
chunkGroupMetadata.getChunkMetadataList().size());
         // chunk begins
@@ -117,15 +116,16 @@ public class TsFileSketchTool {
             .getNumberOfChunks());
         printlnBoth(pw, str1.toString() + "\t[Chunk Group] of "
             + chunkGroupMetadata.getDevice() + " ends");
-        // versionInfo pair begins
-        printlnBoth(pw, 
-            String.format("%20s", chunkEndPos + 
chunkGroupFooter.getSerializedSize()) 
-            + "|\t[Version Info pair]");
-        printlnBoth(pw, String.format("%20s", "") + "|\t\t[marker] 3");
-        printlnBoth(pw,
-            String.format("%20s", "") + "|\t\t[offset] " + 
versionInfo.get(i).left);
-        printlnBoth(pw,
-            String.format("%20s", "") + "|\t\t[version] " + 
versionInfo.get(i).right);
+        // versionInfo begins if there is a versionInfo
+        if (versionMap.containsKey(chunkEndPos + 
chunkGroupFooter.getSerializedSize())) {
+          printlnBoth(pw, 
+              String.format("%20s", chunkEndPos + 
chunkGroupFooter.getSerializedSize()) 
+              + "|\t[Version Info]");
+          printlnBoth(pw, String.format("%20s", "") + "|\t\t[marker] 3");
+          printlnBoth(pw,
+              String.format("%20s", "") + "|\t\t[version] " 
+              + versionMap.get(chunkEndPos + 
chunkGroupFooter.getSerializedSize()));
+        }
         
       }
 
@@ -146,7 +146,6 @@ public class TsFileSketchTool {
         for (TimeseriesMetadata seriesMetadata : seriesMetadataList) {
           
timeseriesMetadataMap.put(seriesMetadata.getOffsetOfChunkMetaDataList(), 
               new Pair<>(new Path(device, seriesMetadata.getMeasurementId()), 
seriesMetadata));
-          
         }
       }
       for (Map.Entry<Long, Pair<Path, TimeseriesMetadata>> entry : 
timeseriesMetadataMap.entrySet()) {
@@ -213,4 +212,16 @@ public class TsFileSketchTool {
     pw.println(str);
   }
 
+  private static Pair<String, String> checkArgs(String[] args) {
+    String filename = "test.tsfile";
+    String outFile = "TsFile_sketch_view.txt";
+    if (args.length == 1) {
+      filename = args[0];
+    } else if (args.length == 2) {
+      filename = args[0];
+      outFile = args[1];
+    }
+    return new Pair<>(filename, outFile);
+  }
+
 }

Reply via email to