JackieTien97 commented on code in PR #16035:
URL: https://github.com/apache/iotdb/pull/16035#discussion_r2244208985


##########
iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/common/schematree/ClusterSchemaTree.java:
##########
@@ -485,59 +492,151 @@ public void serialize(OutputStream outputStream) throws 
IOException {
     root.serialize(outputStream);
   }
 
-  public static ClusterSchemaTree deserialize(InputStream inputStream) throws 
IOException {
+  public Iterator<SchemaNode> getIteratorForSerialize() {
+    return new SchemaNodePostOrderIterator(root);
+  }
 
-    byte nodeType;
-    int childNum;
-    Deque<SchemaNode> stack = new ArrayDeque<>();
-    SchemaNode child;
-    boolean hasLogicalView = false;
-    boolean hasNormalTimeSeries = false;
-    Map<Integer, Template> templateMap = new HashMap<>();
-
-    while (inputStream.available() > 0) {
-      nodeType = ReadWriteIOUtils.readByte(inputStream);
-      if (nodeType == SCHEMA_MEASUREMENT_NODE) {
-        SchemaMeasurementNode measurementNode = 
SchemaMeasurementNode.deserialize(inputStream);
-        stack.push(measurementNode);
-        if (measurementNode.isLogicalView()) {
-          hasLogicalView = true;
+  @Override
+  public long ramBytesUsed() {
+    if (ramBytesUsed > 0) {
+      return ramBytesUsed;
+    }
+    ramBytesUsed =
+        root.ramBytesUsed()
+            + SHALLOW_SIZE
+            + RamUsageEstimator.sizeOfMapWithKnownShallowSize(
+                templateMap,
+                RamUsageEstimator.SHALLOW_SIZE_OF_HASHMAP,
+                RamUsageEstimator.SHALLOW_SIZE_OF_HASHMAP_ENTRY);
+    return ramBytesUsed;
+  }
+
+  public void setRamBytesUsed(long ramBytesUsed) {
+    this.ramBytesUsed = ramBytesUsed;
+  }
+
+  private static class SchemaNodePostOrderIterator implements 
Iterator<SchemaNode> {
+    private final Deque<Pair<SchemaNode, Iterator<SchemaNode>>> stack = new 
ArrayDeque<>();
+    private SchemaNode nextNode;
+
+    public SchemaNodePostOrderIterator(SchemaNode root) {
+      stack.push(new Pair<>(root, root.getChildrenIterator()));
+      prepareNext();
+    }
+
+    @Override
+    public boolean hasNext() {
+      return nextNode != null;
+    }
+
+    @Override
+    public SchemaNode next() {
+      if (!hasNext()) {
+        throw new NoSuchElementException();
+      }
+      SchemaNode result = nextNode;
+      prepareNext();
+      return result;
+    }
+
+    private void prepareNext() {
+      nextNode = null;
+      while (!stack.isEmpty()) {
+        Pair<SchemaNode, Iterator<SchemaNode>> pair = stack.peek();
+        SchemaNode currentNode = pair.getLeft();
+        Iterator<SchemaNode> childrenIterator = pair.getRight();
+        if (childrenIterator.hasNext()) {
+          SchemaNode child = childrenIterator.next();
+          stack.push(new Pair<>(child, child.getChildrenIterator()));
+        } else {
+          stack.pop();
+          nextNode = currentNode;
+          return;
         }
-        hasNormalTimeSeries = true;
-      } else {
-        SchemaInternalNode internalNode;
-        if (nodeType == SCHEMA_ENTITY_NODE) {
-          internalNode = SchemaEntityNode.deserialize(inputStream);
-          int templateId = internalNode.getAsEntityNode().getTemplateId();
-          if (templateId != NON_TEMPLATE) {
-            templateMap.putIfAbsent(templateId, 
templateManager.getTemplate(templateId));
+      }
+    }
+  }
+
+  public static class SchemaNodeBatchDeserializer {
+    private byte nodeType;
+    private int childNum;
+    private Deque<SchemaNode> stack = new ArrayDeque<>();
+    private SchemaNode child;
+    private boolean hasLogicalView = false;
+    private boolean hasNormalTimeSeries = false;
+    private Map<Integer, Template> templateMap = new HashMap<>();
+    private boolean isFirstBatch = true;
+
+    public boolean isFirstBatch() {
+      return isFirstBatch;
+    }
+
+    public void deserializeFromBatch(InputStream inputStream) throws 
IOException {
+      isFirstBatch = false;
+      while (inputStream.available() > 0) {
+        nodeType = ReadWriteIOUtils.readByte(inputStream);
+        if (nodeType == SCHEMA_MEASUREMENT_NODE) {
+          SchemaMeasurementNode measurementNode = 
SchemaMeasurementNode.deserialize(inputStream);
+          stack.push(measurementNode);
+          if (measurementNode.isLogicalView()) {
+            hasLogicalView = true;
           }
+          hasNormalTimeSeries = true;
         } else {
-          internalNode = SchemaInternalNode.deserialize(inputStream);
-        }
+          SchemaInternalNode internalNode;
+          if (nodeType == SCHEMA_ENTITY_NODE) {
+            internalNode = SchemaEntityNode.deserialize(inputStream);
+            int templateId = internalNode.getAsEntityNode().getTemplateId();
+            if (templateId != NON_TEMPLATE) {
+              templateMap.putIfAbsent(templateId, 
templateManager.getTemplate(templateId));
+            }
+          } else {
+            internalNode = SchemaInternalNode.deserialize(inputStream);
+          }
 
-        childNum = ReadWriteIOUtils.readInt(inputStream);
-        while (childNum > 0) {
-          child = stack.pop();
-          internalNode.addChild(child.getName(), child);
-          if (child.isMeasurement()) {
-            SchemaMeasurementNode measurementNode = 
child.getAsMeasurementNode();
-            if (measurementNode.getAlias() != null) {
-              internalNode
-                  .getAsEntityNode()
-                  .addAliasChild(measurementNode.getAlias(), measurementNode);
+          childNum = ReadWriteIOUtils.readInt(inputStream);
+          while (childNum > 0) {
+            child = stack.pop();
+            internalNode.addChild(child.getName(), child);
+            if (child.isMeasurement()) {
+              SchemaMeasurementNode measurementNode = 
child.getAsMeasurementNode();
+              if (measurementNode.getAlias() != null) {
+                internalNode
+                    .getAsEntityNode()
+                    .addAliasChild(measurementNode.getAlias(), 
measurementNode);
+              }
             }
+            childNum--;
           }
-          childNum--;
+          stack.push(internalNode);
         }
-        stack.push(internalNode);
       }
     }
-    ClusterSchemaTree result = new ClusterSchemaTree(stack.poll());
-    result.templateMap = templateMap;
-    result.hasLogicalMeasurementPath = hasLogicalView;
-    result.hasNormalTimeSeries = hasNormalTimeSeries;
-    return result;
+
+    public ClusterSchemaTree finish() {
+      try {
+        ClusterSchemaTree result = new ClusterSchemaTree(stack.poll());
+        result.templateMap = templateMap;
+        result.hasLogicalMeasurementPath = hasLogicalView;
+        result.hasNormalTimeSeries = hasNormalTimeSeries;
+        return result;
+      } finally {
+        nodeType = 0;
+        childNum = 0;
+        stack.clear();
+        child = null;
+        hasLogicalView = false;
+        hasNormalTimeSeries = false;
+        templateMap = new HashMap<>();
+        isFirstBatch = true;

Review Comment:
   Add a comment explaining why a new HashMap must be constructed here instead of
calling the existing map's clear() method.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to