jt2594838 commented on code in PR #16027:
URL: https://github.com/apache/iotdb/pull/16027#discussion_r2652335302


##########
integration-test/src/test/java/org/apache/iotdb/session/it/IoTDBSessionSchemaTemplateIT.java:
##########
@@ -490,6 +490,7 @@ public void testHybridAutoExtendSchemaTemplate()
           Arrays.asList(1d, 2d, 3));
       Assert.fail();
     } catch (StatementExecutionException e) {
+      System.out.println("hi: " + e.getMessage());

Review Comment:
   Remove



##########
iotdb-client/session/src/main/java/org/apache/iotdb/session/Session.java:
##########
@@ -3075,9 +3075,10 @@ private TSInsertTabletReq genTSInsertTabletReq(Tablet tablet, boolean sorted, bo
                 .getOrDefault(
                     measurementSchema.getType(),
                     TSEncoding.valueOf(
-                        TSFileDescriptor.getInstance()
-                            .getConfig()
-                            .getValueEncoder(measurementSchema.getType())))
+                        String.valueOf(
+                            TSFileDescriptor.getInstance()
+                                .getConfig()
+                                .getValueEncoder(measurementSchema.getType()))))

Review Comment:
   What is this?



##########
iotdb-core/confignode/src/main/java/org/apache/iotdb/confignode/manager/ProcedureManager.java:
##########
@@ -469,6 +474,24 @@ public TSStatus alterLogicalView(final TAlterLogicalViewReq req) {
     return waitingProcedureFinished(procedure);
   }
 
+  public TSStatus alterTimeSeriesDataType(final TAlterTimeSeriesReq req) {
+    AlterTimeSeriesDataTypeProcedure procedure;
+    synchronized (this) {
+      procedure =
+          new AlterTimeSeriesDataTypeProcedure(
+              req.getQueryId(),
+              (MeasurementPath)
+                  PathDeserializeUtil.deserialize(ByteBuffer.wrap(req.getMeasurementPath())),
+              //
+              // MeasurementPath.deserialize(ByteBuffer.wrap(req.getMeasurementPath())),

Review Comment:
   Remove



##########
integration-test/src/test/java/org/apache/iotdb/relational/it/schema/IoTDBAlterTimeSeriesTypeIT.java:
##########


Review Comment:
   Move this to a correct package (not relational)



##########
iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/execution/operator/source/SeriesScanUtil.java:
##########
@@ -312,6 +331,7 @@ public boolean canUseCurrentFileStatistics() {
     checkState(firstTimeSeriesMetadata != null, "no first file");
 
     if (currentFileOverlapped() || firstTimeSeriesMetadata.isModified()) {
+      //        || !firstTimeSeriesMetadata.isModified()) {

Review Comment:
   Remove



##########
iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/execution/config/executor/ClusterConfigTaskExecutor.java:
##########
@@ -3157,6 +3161,72 @@ public TSStatus alterLogicalViewByPipe(
     return tsStatus;
   }
 
+  @Override
+  public SettableFuture<ConfigTaskResult> alterTimeSeriesDataType(
+      final String queryId, final AlterTimeSeriesStatement alterTimeSeriesStatement) {
+    final SettableFuture<ConfigTaskResult> future = SettableFuture.create();
+    // Will only occur if no permission
+    if (alterTimeSeriesStatement.getPath() == null) {
+      future.set(new ConfigTaskResult(TSStatusCode.SUCCESS_STATUS));
+      return future;
+    }
+
+    ByteBuffer measurementPathBuffer = null;
+    try (ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
+      alterTimeSeriesStatement.getPath().serialize(baos);
+      measurementPathBuffer = ByteBuffer.wrap(baos.toByteArray());
+    } catch (IOException ignored) {
+      // ByteArrayOutputStream won't throw IOException
+    }
+
+    final ByteArrayOutputStream stream = new ByteArrayOutputStream();
+    try {
+      ReadWriteIOUtils.write(alterTimeSeriesStatement.getDataType(), stream);
+    } catch (final IOException ignored) {
+      // ByteArrayOutputStream won't throw IOException
+    }

Review Comment:
   Use new ByteArrayOutputStream(1), or simply allocate a ByteBuffer with capacity 1.
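   
   For illustration, a minimal sketch of both options, reusing the names from the diff above
   (a suggestion only, not the final implementation; it assumes TSDataType exposes a
   byte-returning serialize(), as in the tsfile enum):
   
     // Option 1: size the stream to the single byte that will be written.
     final ByteArrayOutputStream stream = new ByteArrayOutputStream(1);
     ReadWriteIOUtils.write(alterTimeSeriesStatement.getDataType(), stream);
   
     // Option 2: skip the stream entirely and fill a one-byte buffer directly.
     final ByteBuffer dataTypeBuffer = ByteBuffer.allocate(1);
     dataTypeBuffer.put(alterTimeSeriesStatement.getDataType().serialize());
     dataTypeBuffer.flip();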



##########
iotdb-core/datanode/src/main/java/org/apache/iotdb/db/utils/SchemaUtils.java:
##########
@@ -256,4 +288,254 @@ public static List<String> splitPartialBuiltinAggregation(TAggregationType aggre
             String.format("Invalid Aggregation function: %s", aggregationType));
     }
   }
+
+  private static void addSymmetricPairs(
+      Set<Pair<TSDataType, TSDataType>> set, TSDataType... dataTypes) {
+    for (int i = 0; i < dataTypes.length; i++) {
+      for (int j = i + 1; j < dataTypes.length; j++) {
+        set.add(new Pair<>(dataTypes[i], dataTypes[j]));
+        set.add(new Pair<>(dataTypes[j], dataTypes[i]));
+      }
+    }
+  }

Review Comment:
   This implementation is not very efficient (n^2, as you can imagine); consider something
   like a disjoint set, or simply map each type to the column class it uses:
   
   Map<TSDataType, Class> dataTypeColumnClassMap;
   
   dataTypeColumnClassMap.put(DATE, IntColumn.class);
   dataTypeColumnClassMap.put(INT32, IntColumn.class);
   
   dataTypeColumnClassMap.get(typeA) == dataTypeColumnClassMap.get(typeB)
   
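   For illustration, a self-contained sketch of that idea (the map contents and the column
   classes below are assumptions for the example, not the complete mapping):
   
     private static final Map<TSDataType, Class<? extends Column>> DATA_TYPE_COLUMN_CLASS_MAP =
         new EnumMap<>(TSDataType.class);
   
     static {
       // Types that share the same column implementation map to the same class.
       DATA_TYPE_COLUMN_CLASS_MAP.put(TSDataType.INT32, IntColumn.class);
       DATA_TYPE_COLUMN_CLASS_MAP.put(TSDataType.DATE, IntColumn.class);
       DATA_TYPE_COLUMN_CLASS_MAP.put(TSDataType.INT64, LongColumn.class);
       DATA_TYPE_COLUMN_CLASS_MAP.put(TSDataType.TIMESTAMP, LongColumn.class);
       DATA_TYPE_COLUMN_CLASS_MAP.put(TSDataType.TEXT, BinaryColumn.class);
       DATA_TYPE_COLUMN_CLASS_MAP.put(TSDataType.STRING, BinaryColumn.class);
       DATA_TYPE_COLUMN_CLASS_MAP.put(TSDataType.BLOB, BinaryColumn.class);
     }
   
     public static boolean isUsingSameColumn(TSDataType originalDataType, TSDataType dataType) {
       if (originalDataType == dataType) {
         return true;
       }
       final Class<? extends Column> a = DATA_TYPE_COLUMN_CLASS_MAP.get(originalDataType);
       final Class<? extends Column> b = DATA_TYPE_COLUMN_CLASS_MAP.get(dataType);
       // O(1) lookup instead of enumerating symmetric pairs; unmapped types never match.
       return a != null && a == b;
     }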



##########
iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/execution/config/metadata/AlterTimeSeriesTask.java:
##########
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.db.queryengine.plan.execution.config.metadata;
+
+import org.apache.iotdb.db.queryengine.plan.execution.config.ConfigTaskResult;
+import org.apache.iotdb.db.queryengine.plan.execution.config.IConfigTask;
+import org.apache.iotdb.db.queryengine.plan.execution.config.executor.IConfigTaskExecutor;
+import org.apache.iotdb.db.queryengine.plan.statement.metadata.AlterTimeSeriesStatement;
+
+import com.google.common.util.concurrent.ListenableFuture;
+
+public class AlterTimeSeriesTask implements IConfigTask {
+  private final String queryId;
+
+  private final AlterTimeSeriesStatement alterTimeSeriesStatement;
+
+  public AlterTimeSeriesTask(
+      final String queryId, final AlterTimeSeriesStatement alterTimeSeriesStatement) {
+    this.queryId = queryId;
+    this.alterTimeSeriesStatement = alterTimeSeriesStatement;
+  }
+
+  @Override
+  public ListenableFuture<ConfigTaskResult> execute(final IConfigTaskExecutor configTaskExecutor)
+      throws InterruptedException {
+    if (alterTimeSeriesStatement.getDataType() != null) {
+      return configTaskExecutor.alterTimeSeriesDataType(queryId, alterTimeSeriesStatement);
+    } else {
+      // not support
+      throw new InterruptedException("AlterTimeSeriesTask is not support");

Review Comment:
   Exception type and error message are improper.
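   
   For example, something along these lines (the exception class is only a suggestion):
   
     } else {
       // Altering anything other than the data type is not supported by this task.
       throw new UnsupportedOperationException(
           "AlterTimeSeriesTask only supports altering the data type");
     }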



##########
iotdb-core/datanode/src/main/java/org/apache/iotdb/db/pipe/resource/memory/InsertNodeMemoryEstimator.java:
##########
@@ -571,27 +575,65 @@ public static long sizeOfColumns(
       if (measurementSchemas[i] == null || measurementSchemas[i].getType() == null) {
         continue;
       }
-      switch (measurementSchemas[i].getType()) {
+      switch ((dataTypes != null && dataTypes[i] != null)
+          ? dataTypes[i]
+          : measurementSchemas[i].getType()) {
         case INT64:
         case TIMESTAMP:
           {
-            size += RamUsageEstimator.sizeOf((long[]) columns[i]);
+            if (columns[i] instanceof long[]) {
+              size += RamUsageEstimator.sizeOf((long[]) columns[i]);
+            } else {
+              Object[] array = (Object[]) columns[i];
+              long[] targetArray = new long[array.length];
+              for (int j = 0; j < array.length; j++) {
+                targetArray[j] = ((Number) array[j]).longValue();
+              }
+              size += RamUsageEstimator.sizeOf(targetArray);

Review Comment:
   What is this?



##########
iotdb-core/datanode/src/main/java/org/apache/iotdb/db/exception/metadata/DataTypeMismatchException.java:
##########
@@ -46,6 +49,21 @@ public DataTypeMismatchException(
             insertType,
             time,
             value == null ? "null" : processValue(value.toString())));
+
+    StackTraceElement[] stackTraceElements = Thread.currentThread().getStackTrace();
+
+    // Usually we need to skip the first two stack frames, because they correspond to the
+    // printCallerInfo() method and the currently executing method (e.g. methodB())
+    if (stackTraceElements.length > 2) {
+      // Get the caller's information
+      StackTraceElement caller =
+          stackTraceElements[2]; // indices start at 0, so take the 3rd element (the 0th and 1st belong to the current method)
+      log.error("Caller class name: {}", caller.getClassName());
+      log.error("Caller method name: {}", caller.getMethodName());
+      log.error("Caller file name: {}", caller.getFileName());
+      log.error("Caller line number: {}", caller.getLineNumber());
+    } else {
+      log.error("Insufficient stack trace information; cannot determine the caller.");
+    }

Review Comment:
   Remove 



##########
iotdb-core/confignode/src/main/java/org/apache/iotdb/confignode/persistence/schema/ConfigMTree.java:
##########
@@ -992,21 +994,72 @@ public void commitDeleteColumn(
     }
   }
 
+  public void preAlterColumnDataType(
+      PartialPath database, String tableName, String columnName, TSDataType dataType)
+      throws MetadataException {
+    final ConfigTableNode node = getTableNode(database, tableName);
+    final TsTableColumnSchema columnSchema = node.getTable().getColumnSchema(columnName);
+
+    //    ((FieldColumnSchema) columnSchema).getEncoding().isSupported()
+    if (Objects.isNull(columnSchema)) {
+      throw new ColumnNotExistsException(
+          PathUtils.unQualifyDatabaseName(database.getFullPath()), tableName, columnName);
+    }

Review Comment:
   Where is the encoding processed?



##########
iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/execution/operator/source/SeriesScanUtil.java:
##########
@@ -839,8 +911,473 @@ public TsBlock nextPage() throws IOException {
 
       firstPageReader = null;
 
+      return getTransferedDataTypeTsBlock(tsBlock);
+    }
+  }
+
+  private TsBlock getTransferedDataTypeTsBlock(TsBlock tsBlock) {
+    Column[] valueColumns = tsBlock.getValueColumns();
+    int length = tsBlock.getValueColumnCount();
+    boolean isTypeInconsistent = false;
+    if (length > 0) {
+      for (int i = 0; i < length; i++) {
+        TSDataType finalDataType = getTsDataTypeList().get(i);
+        if ((valueColumns[i].getDataType() != finalDataType)
+            && (SchemaUtils.isUsingSameColumn(valueColumns[i].getDataType(), finalDataType)
+                || (valueColumns[i].getDataType().equals(TSDataType.DATE)
+                    && Arrays.asList(TSDataType.STRING, TSDataType.TEXT)
+                        .contains(finalDataType)))) {

Review Comment:
   Use direct comparison
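   
   i.e., something like (sketch only):
   
     || (valueColumns[i].getDataType() == TSDataType.DATE
         && (finalDataType == TSDataType.STRING || finalDataType == TSDataType.TEXT))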



##########
iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/execution/operator/source/SeriesScanUtil.java:
##########
@@ -839,8 +911,473 @@ public TsBlock nextPage() throws IOException {
 
       firstPageReader = null;
 
+      return getTransferedDataTypeTsBlock(tsBlock);
+    }
+  }
+
+  private TsBlock getTransferedDataTypeTsBlock(TsBlock tsBlock) {
+    Column[] valueColumns = tsBlock.getValueColumns();
+    int length = tsBlock.getValueColumnCount();
+    boolean isTypeInconsistent = false;
+    if (length > 0) {
+      for (int i = 0; i < length; i++) {
+        TSDataType finalDataType = getTsDataTypeList().get(i);
+        if ((valueColumns[i].getDataType() != finalDataType)
+            && (SchemaUtils.isUsingSameColumn(valueColumns[i].getDataType(), finalDataType)

Review Comment:
   Should this be !SchemaUtils.isUsingSameColumn(valueColumns[i].getDataType(), finalDataType)?



##########
integration-test/src/test/java/org/apache/iotdb/relational/it/schema/IoTDBAlterColumnTypeIT.java:
##########
@@ -0,0 +1,2286 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.relational.it.schema;
+
+import org.apache.iotdb.commons.utils.MetadataUtils;
+import org.apache.iotdb.db.utils.SchemaUtils;
+import org.apache.iotdb.isession.ISession;
+import org.apache.iotdb.isession.ITableSession;
+import org.apache.iotdb.isession.SessionDataSet;
+import org.apache.iotdb.it.env.EnvFactory;
+import org.apache.iotdb.it.framework.IoTDBTestRunner;
+import org.apache.iotdb.itbase.category.TableClusterIT;
+import org.apache.iotdb.itbase.category.TableLocalStandaloneIT;
+import org.apache.iotdb.rpc.IoTDBConnectionException;
+import org.apache.iotdb.rpc.StatementExecutionException;
+
+import org.apache.tsfile.enums.ColumnCategory;
+import org.apache.tsfile.enums.TSDataType;
+import org.apache.tsfile.exception.write.WriteProcessException;
+import org.apache.tsfile.file.metadata.ColumnSchema;
+import org.apache.tsfile.file.metadata.TableSchema;
+import org.apache.tsfile.file.metadata.enums.CompressionType;
+import org.apache.tsfile.file.metadata.enums.TSEncoding;
+import org.apache.tsfile.read.common.Field;
+import org.apache.tsfile.read.common.Path;
+import org.apache.tsfile.read.common.RowRecord;
+import org.apache.tsfile.utils.Binary;
+import org.apache.tsfile.write.TsFileWriter;
+import org.apache.tsfile.write.record.Tablet;
+import org.apache.tsfile.write.schema.IMeasurementSchema;
+import org.apache.tsfile.write.schema.MeasurementSchema;
+import org.apache.tsfile.write.v4.ITsFileWriter;
+import org.apache.tsfile.write.v4.TsFileWriterBuilder;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.ws.rs.NotSupportedException;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.sql.Connection;
+import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.Set;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import static org.apache.iotdb.db.it.utils.TestUtils.prepareData;
+import static org.apache.iotdb.db.it.utils.TestUtils.prepareTableData;
+import static 
org.apache.iotdb.relational.it.session.IoTDBSessionRelationalIT.genValue;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+@SuppressWarnings("ResultOfMethodCallIgnored")
+@RunWith(IoTDBTestRunner.class)
+@Category({TableLocalStandaloneIT.class, TableClusterIT.class})
+public class IoTDBAlterColumnTypeIT {
+
+  private static final Logger log = 
LoggerFactory.getLogger(IoTDBAlterColumnTypeIT.class);
+  private static long timeout = -1;
+
+  @BeforeClass
+  public static void setUp() throws Exception {
+    
EnvFactory.getEnv().getConfig().getDataNodeConfig().setCompactionScheduleInterval(1000);
+    EnvFactory.getEnv().initClusterEnvironment();
+    try (ITableSession session = 
EnvFactory.getEnv().getTableSessionConnection()) {
+      session.executeNonQueryStatement("DROP DATABASE IF EXISTS test");
+    }
+    try (ITableSession session = 
EnvFactory.getEnv().getTableSessionConnection()) {
+      session.executeNonQueryStatement("CREATE DATABASE IF NOT EXISTS test");
+    }
+  }
+
+  @AfterClass
+  public static void tearDown() throws Exception {
+    EnvFactory.getEnv().cleanClusterEnvironment();
+  }
+
+  @Test
+  public void testWriteAndAlter()
+      throws IoTDBConnectionException,
+          StatementExecutionException,
+          IOException,
+          WriteProcessException {
+    Set<TSDataType> typesToTest = new HashSet<>();
+    Collections.addAll(typesToTest, TSDataType.values());
+    typesToTest.remove(TSDataType.VECTOR);
+    typesToTest.remove(TSDataType.UNKNOWN);
+
+    for (TSDataType from : typesToTest) {
+      for (TSDataType to : typesToTest) {
+        if (from != to && to.isCompatible(from)) {
+          System.out.printf("testing %s to %s%n", from, to);
+          doWriteAndAlter(from, to);
+          testAlignDeviceSequenceDataQuery(from, to);
+          testAlignDeviceUnSequenceDataQuery(from, to);
+          testAlignDeviceUnSequenceOverlappedDataQuery(from, to);
+        }
+      }
+    }
+  }
+
+  private void doWriteAndAlter(TSDataType from, TSDataType to)
+      throws IoTDBConnectionException, StatementExecutionException {
+    try (ITableSession session = 
EnvFactory.getEnv().getTableSessionConnectionWithDB("test")) {
+      session.executeNonQueryStatement("SET CONFIGURATION 
enable_unseq_space_compaction='false'");
+      if (from == TSDataType.DATE && !to.isCompatible(from)) {
+        throw new NotSupportedException("Not supported DATE type.");
+      }
+
+      // create a table with type of "from"
+      session.executeNonQueryStatement(
+          "CREATE TABLE IF NOT EXISTS write_and_alter_column_type (s1 " + from 
+ ")");
+
+      // write a sequence tsfile point of "from"
+      Tablet tablet =
+          new Tablet(
+              "write_and_alter_column_type",
+              Collections.singletonList("s1"),
+              Collections.singletonList(from),
+              Collections.singletonList(ColumnCategory.FIELD));
+      tablet.addTimestamp(0, 1);
+      tablet.addValue("s1", 0, genValue(from, 1));
+      session.insert(tablet);
+      tablet.reset();
+      session.executeNonQueryStatement("FLUSH");
+
+      // write an unsequence tsfile point of "from"
+      tablet.addTimestamp(0, 1);
+      tablet.addValue("s1", 0, genValue(from, 1));
+      session.insert(tablet);
+      tablet.reset();
+      session.executeNonQueryStatement("FLUSH");
+
+      // write a sequence memtable point of "from"
+      tablet.addTimestamp(0, 2);
+      tablet.addValue("s1", 0, genValue(from, 2));
+      session.insert(tablet);
+      tablet.reset();
+
+      // write an unsequence memtable point of "from"
+      tablet.addTimestamp(0, 1);
+      tablet.addValue("s1", 0, genValue(from, 1));
+      session.insert(tablet);
+      tablet.reset();
+
+      SessionDataSet dataSet1 =
+          session.executeQueryStatement("select * from 
write_and_alter_column_type order by time");
+      RowRecord rec1;
+      for (int i = 1; i <= 2; i++) {
+        rec1 = dataSet1.next();
+        assertEquals(i, rec1.getFields().get(0).getLongV());
+        //        System.out.println(i + " is " + 
rec1.getFields().get(1).toString());
+      }
+
+      // alter the type to "to"
+      boolean isCompatible = MetadataUtils.canAlter(from, to);
+      if (isCompatible) {
+        session.executeNonQueryStatement(
+            "ALTER TABLE write_and_alter_column_type ALTER COLUMN s1 SET DATA 
TYPE " + to);
+      } else {
+        try {
+          session.executeNonQueryStatement(
+              "ALTER TABLE write_and_alter_column_type ALTER COLUMN s1 SET 
DATA TYPE " + to);
+        } catch (StatementExecutionException e) {
+          assertEquals(
+              "701: New type " + to + " is not compatible with the existing 
one " + from,
+              e.getMessage());
+        }
+      }
+
+      // If don't execute the flush" operation, verify if result can get valid 
value, not be null
+      // when query memtable.
+      //      session.executeNonQueryStatement("FLUSH");
+
+      SessionDataSet dataSet =
+          session.executeQueryStatement(
+              "select time, s1 from write_and_alter_column_type order by 
time");
+      RowRecord rec;
+      TSDataType newType = isCompatible ? to : from;
+      for (int i = 1; i <= 2; i++) {
+        rec = dataSet.next();
+        assertEquals(i, rec.getFields().get(0).getLongV());
+        if (newType == TSDataType.BLOB) {
+          assertEquals(genValue(newType, i), 
rec.getFields().get(1).getBinaryV());
+        } else if (newType == TSDataType.DATE) {
+          assertEquals(genValue(newType, i), 
rec.getFields().get(1).getDateV());
+        } else if (newType == TSDataType.STRING || newType == TSDataType.TEXT) 
{
+          if (from == TSDataType.DATE) {
+            assertEquals(
+                new Binary(genValue(from, i).toString(), 
StandardCharsets.UTF_8),
+                rec.getFields().get(1).getBinaryV());
+          } else {
+            assertEquals(
+                newType.castFromSingleValue(from, genValue(from, i)),
+                rec.getFields().get(1).getBinaryV());
+          }
+        } else {
+          assertEquals(genValue(newType, i).toString(), 
rec.getFields().get(1).toString());
+        }
+      }
+      assertNull(dataSet.next());
+      dataSet.close();
+
+      // write an altered point in sequence and unsequnce tsfile
+      tablet =
+          new Tablet(
+              "write_and_alter_column_type",
+              Collections.singletonList("s1"),
+              Collections.singletonList(newType),
+              Collections.singletonList(ColumnCategory.FIELD));
+      tablet.addTimestamp(0, 3);
+      tablet.addValue("s1", 0, genValue(newType, 3));
+      session.insert(tablet);
+      tablet.reset();
+
+      tablet.addTimestamp(0, 1);
+      tablet.addValue("s1", 0, genValue(newType, 1));
+      session.insert(tablet);
+      tablet.reset();
+      session.executeNonQueryStatement("FLUSH");
+
+      // write an altered point in sequence and unsequnce memtable
+      tablet.addTimestamp(0, 4);
+      tablet.addValue("s1", 0, genValue(newType, 4));
+      session.insert(tablet);
+      tablet.reset();
+
+      tablet.addTimestamp(0, 2);
+      tablet.addValue("s1", 0, genValue(newType, 2));
+      session.insert(tablet);
+      tablet.reset();
+
+      dataSet =
+          session.executeQueryStatement(
+              "select time, s1 from write_and_alter_column_type order by 
time");
+      for (int i = 1; i <= 4; i++) {
+        rec = dataSet.next();
+        assertEquals(i, rec.getFields().get(0).getLongV());
+        if (newType == TSDataType.BLOB) {
+          assertEquals(genValue(newType, i), 
rec.getFields().get(1).getBinaryV());
+        } else if (newType == TSDataType.DATE) {
+          assertEquals(genValue(newType, i), 
rec.getFields().get(1).getDateV());
+        } else if (newType == TSDataType.STRING || newType == TSDataType.TEXT) 
{
+          assertEquals(genValue(to, i), rec.getFields().get(1).getBinaryV());
+        } else {
+          assertEquals(genValue(newType, i).toString(), 
rec.getFields().get(1).toString());
+        }
+      }
+      assertFalse(dataSet.hasNext());
+
+      dataSet =
+          session.executeQueryStatement(
+              "select min(s1),max(s1),first(s1),last(s1) from 
write_and_alter_column_type");
+      rec = dataSet.next();
+      int[] expectedValue = {1, 4, 1, 4};
+      for (int i = 0; i < 4; i++) {
+        if (newType == TSDataType.BLOB) {
+          assertEquals(genValue(newType, expectedValue[i]), 
rec.getFields().get(i).getBinaryV());
+        } else if (newType == TSDataType.DATE) {
+          assertEquals(genValue(newType, expectedValue[i]), 
rec.getFields().get(i).getDateV());
+        } else {
+          assertEquals(
+              genValue(newType, expectedValue[i]).toString(), 
rec.getFields().get(i).toString());
+        }
+      }
+      assertFalse(dataSet.hasNext());
+
+      if (newType.isNumeric()) {
+        dataSet =
+            session.executeQueryStatement(
+                "select avg(s1),sum(s1) from write_and_alter_column_type");
+        rec = dataSet.next();
+        assertEquals(2.5, rec.getFields().get(0).getDoubleV(), 0.001);
+        assertEquals(10.0, rec.getFields().get(1).getDoubleV(), 0.001);
+        assertFalse(dataSet.hasNext());
+      }
+
+    } finally {
+      try (ITableSession session = 
EnvFactory.getEnv().getTableSessionConnectionWithDB("test")) {
+        session.executeNonQueryStatement("DROP TABLE 
write_and_alter_column_type");
+      }
+    }
+  }
+
+  @Test
+  public void testAlterWithoutWrite() throws IoTDBConnectionException, 
StatementExecutionException {
+    Set<TSDataType> typesToTest = new HashSet<>();
+    Collections.addAll(typesToTest, TSDataType.values());
+    typesToTest.remove(TSDataType.VECTOR);
+    typesToTest.remove(TSDataType.UNKNOWN);
+    //    typesToTest.remove(TSDataType.STRING);
+    //    typesToTest.remove(TSDataType.TEXT);
+    //    typesToTest.remove(TSDataType.DATE);

Review Comment:
   Remove commented lines



##########
iotdb-core/datanode/src/main/java/org/apache/iotdb/db/utils/SchemaUtils.java:
##########
@@ -256,4 +288,254 @@ public static List<String> splitPartialBuiltinAggregation(TAggregationType aggre
             String.format("Invalid Aggregation function: %s", aggregationType));
     }
   }
+
+  private static void addSymmetricPairs(
+      Set<Pair<TSDataType, TSDataType>> set, TSDataType... dataTypes) {
+    for (int i = 0; i < dataTypes.length; i++) {
+      for (int j = i + 1; j < dataTypes.length; j++) {
+        set.add(new Pair<>(dataTypes[i], dataTypes[j]));
+        set.add(new Pair<>(dataTypes[j], dataTypes[i]));
+      }
+    }
+  }
+
+  public static boolean isUsingSameColumn(TSDataType originalDataType, TSDataType dataType) {
+    if (originalDataType == dataType) {
+      return true;
+    }
+    return SAME_TYPE_PAIRS.contains(new Pair<>(originalDataType, dataType));
+  }
+
+  public static void changeMetadataModified(
+      TimeseriesMetadata timeseriesMetadata, TSDataType targetDataType) {
+    if (timeseriesMetadata == null) {
+      return;
+    }
+    if (!SchemaUtils.isUsingSameColumn(timeseriesMetadata.getTsDataType(), targetDataType)
+        && Arrays.asList(TSDataType.STRING, TSDataType.TEXT).contains(targetDataType)) {
+      timeseriesMetadata.setModified(true);
+      if (timeseriesMetadata.getChunkMetadataList() != null) {
+        timeseriesMetadata.setChunkMetadataList(
+            timeseriesMetadata.getChunkMetadataList().stream()
+                .map(
+                    iChunkMetadata -> {
+                      if (iChunkMetadata == null) return null;
+                      iChunkMetadata.setModified(true);
+                      return (ChunkMetadata) iChunkMetadata;
+                    })
+                .collect(Collectors.toList()));
+      }

Review Comment:
   Is it necessary to collect into a new list? You may simply modify the original chunkMetadata in place.
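   
   For example, a sketch of the in-place version:
   
     if (timeseriesMetadata.getChunkMetadataList() != null) {
       for (IChunkMetadata chunkMetadata : timeseriesMetadata.getChunkMetadataList()) {
         if (chunkMetadata != null) {
           // Mark the existing metadata as modified instead of rebuilding the list.
           chunkMetadata.setModified(true);
         }
       }
     }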



##########
iotdb-core/confignode/src/main/java/org/apache/iotdb/confignode/manager/schema/ClusterSchemaManager.java:
##########
@@ -1370,6 +1371,34 @@ public synchronized Pair<TSStatus, TsTable> tableColumnCheckForColumnExtension(
     return new Pair<>(RpcUtils.SUCCESS_STATUS, expandedTable);
   }
 
+  public synchronized Pair<TSStatus, TsTable> tableColumnCheckForColumnAltering(
+      final String database,
+      final String tableName,
+      final String columnName,
+      final TSDataType dataType)
+      throws MetadataException {
+    final TsTable originalTable = getTableIfExists(database, tableName).orElse(null);
+
+    if (Objects.isNull(originalTable)) {
+      return new Pair<>(
+          RpcUtils.getStatus(
+              TSStatusCode.TABLE_NOT_EXISTS,
+              String.format("Table '%s.%s' does not exist", database, tableName)),
+          null);
+    }
+
+    TSStatus tsStatus =
+        clusterSchemaInfo.preAlterColumnDataType(database, tableName, columnName, dataType);
+    if (tsStatus.getCode() != TSStatusCode.SUCCESS_STATUS.getStatusCode()) {
+      return new Pair<>(tsStatus, null);
+    }
+
+    final TsTable alteredTable = new TsTable(originalTable);
+    alteredTable.getColumnSchema(columnName).setDataType(dataType);
+
+    return new Pair<>(RpcUtils.SUCCESS_STATUS, alteredTable);

Review Comment:
   Check the encoding, or explain in a comment why the encoding check can be spared here.



##########
iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/plan/parser/ASTVisitor.java:
##########
@@ -667,6 +676,22 @@ public void parseAliasClause(
     }
   }
 
+  private TSDataType parseDataTypeAttribute(IoTDBSqlParser.AttributeValueContext ctx) {
+    TSDataType dataType = null;
+    if (ctx != null) {
+      String dataTypeString = parseAttributeValue(ctx);
+      try {
+        dataType = TSDataType.valueOf(dataTypeString);
+        if (TSDataType.UNKNOWN.equals(dataType) || TSDataType.VECTOR.equals(dataType)) {
+          throw new SemanticException(String.format("Unsupported datatype: %s", dataTypeString));
+        }
+      } catch (Exception e) {
+        throw new SemanticException(String.format("Unsupported datatype: %s", dataTypeString));
+      }
+    }

Review Comment:
   May throw an exception here instead of elsewhere.
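   
   For instance, assuming the missing-attribute case is what should fail (a sketch; adjust to
   the intended semantics):
   
     private TSDataType parseDataTypeAttribute(IoTDBSqlParser.AttributeValueContext ctx) {
       if (ctx == null) {
         // Fail fast here instead of returning null and throwing at a later stage.
         throw new SemanticException("Data type is not specified");
       }
       ...
     }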



##########
iotdb-core/datanode/src/main/java/org/apache/iotdb/db/queryengine/execution/operator/source/SeriesScanUtil.java:
##########
@@ -395,6 +415,7 @@ private void filterFirstChunkMetadata() {
     }
 
     if (currentChunkOverlapped() || firstChunkMetadata.isModified()) {
+      //        || !firstChunkMetadata.isModified()) {

Review Comment:
   Remove



##########
iotdb-core/datanode/src/main/java/org/apache/iotdb/db/storageengine/dataregion/memtable/AbstractMemTable.java:
##########
@@ -356,15 +361,28 @@ public void writeAlignedRow(
 
   public void writeTabletNode(InsertTabletNode insertTabletNode, int start, int end) {
     List<IMeasurementSchema> schemaList = new ArrayList<>();
+    int j = 0;
+    for (TSDataType tsDataType : insertTabletNode.getDataTypes()) {
+      logger.error("getDataTypes[{}] is {}", j, tsDataType);
+      j++;
+    }

Review Comment:
   Remove



##########
iotdb-core/datanode/src/main/java/org/apache/iotdb/db/utils/SchemaUtils.java:
##########
@@ -256,4 +288,254 @@ public static List<String> splitPartialBuiltinAggregation(TAggregationType aggre
             String.format("Invalid Aggregation function: %s", aggregationType));
     }
   }
+
+  private static void addSymmetricPairs(
+      Set<Pair<TSDataType, TSDataType>> set, TSDataType... dataTypes) {
+    for (int i = 0; i < dataTypes.length; i++) {
+      for (int j = i + 1; j < dataTypes.length; j++) {
+        set.add(new Pair<>(dataTypes[i], dataTypes[j]));
+        set.add(new Pair<>(dataTypes[j], dataTypes[i]));
+      }
+    }
+  }
+
+  public static boolean isUsingSameColumn(TSDataType originalDataType, TSDataType dataType) {
+    if (originalDataType == dataType) {
+      return true;
+    }
+    return SAME_TYPE_PAIRS.contains(new Pair<>(originalDataType, dataType));
+  }
+
+  public static void changeMetadataModified(
+      TimeseriesMetadata timeseriesMetadata, TSDataType targetDataType) {
+    if (timeseriesMetadata == null) {
+      return;
+    }
+    if (!SchemaUtils.isUsingSameColumn(timeseriesMetadata.getTsDataType(), targetDataType)
+        && Arrays.asList(TSDataType.STRING, TSDataType.TEXT).contains(targetDataType)) {
+      timeseriesMetadata.setModified(true);
+      if (timeseriesMetadata.getChunkMetadataList() != null) {
+        timeseriesMetadata.setChunkMetadataList(
+            timeseriesMetadata.getChunkMetadataList().stream()
+                .map(
+                    iChunkMetadata -> {
+                      if (iChunkMetadata == null) return null;
+                      iChunkMetadata.setModified(true);
+                      return (ChunkMetadata) iChunkMetadata;
+                    })
+                .collect(Collectors.toList()));
+      }
+    }
+  }
+
+  public static void changeAlignedMetadataModified(
+      AbstractAlignedTimeSeriesMetadata alignedTimeSeriesMetadata,
+      List<TSDataType> targetDataTypeList) {
+    if (alignedTimeSeriesMetadata == null) {
+      return;
+    }
+
+    int i = 0;
+    for (TimeseriesMetadata timeseriesMetadata :
+        alignedTimeSeriesMetadata.getValueTimeseriesMetadataList()) {
+      if ((timeseriesMetadata != null)
+          && !SchemaUtils.isUsingSameColumn(
+              timeseriesMetadata.getTsDataType(), targetDataTypeList.get(i))
+          && Arrays.asList(TSDataType.STRING, TSDataType.TEXT)
+              .contains(targetDataTypeList.get(i))) {
+        timeseriesMetadata.setModified(true);
+        alignedTimeSeriesMetadata.setModified(true);
+        if (timeseriesMetadata.getChunkMetadataList() != null) {
+          timeseriesMetadata.setChunkMetadataList(
+              timeseriesMetadata.getChunkMetadataList().stream()
+                  .map(
+                      iChunkMetadata -> {
+                        if (iChunkMetadata == null) return null;
+                        iChunkMetadata.setModified(true);
+                        return (ChunkMetadata) iChunkMetadata;
+                      })
+                  .collect(Collectors.toList()));
+        }
+      }
+      i++;
+    }
+  }
+
+  public static void changeAlignedMetadataModified(
+      TimeseriesMetadata timeseriesMetadata, TSDataType targetDataType) {
+    if (timeseriesMetadata == null) {
+      return;
+    }
+
+    if (!SchemaUtils.isUsingSameColumn(timeseriesMetadata.getTsDataType(), targetDataType)
+        && Arrays.asList(TSDataType.STRING, TSDataType.TEXT).contains(targetDataType)) {
+      timeseriesMetadata.setModified(true);
+      if (timeseriesMetadata.getChunkMetadataList() != null) {
+        timeseriesMetadata.setChunkMetadataList(
+            timeseriesMetadata.getChunkMetadataList().stream()
+                .map(
+                    iChunkMetadata -> {
+                      if (iChunkMetadata == null) return null;
+                      iChunkMetadata.setModified(true);
+                      return (ChunkMetadata) iChunkMetadata;
+                    })
+                .collect(Collectors.toList()));
+      }
+    }
+  }
+
+  public static void changeMetadataModified(
+      IChunkMetadata chunkMetadata, TSDataType sourceDataType, TSDataType targetDataType) {
+    if (chunkMetadata == null) {
+      return;
+    }
+    if (!SchemaUtils.isUsingSameColumn(sourceDataType, targetDataType)
+        && Arrays.asList(TSDataType.STRING, TSDataType.TEXT).contains(targetDataType)) {

Review Comment:
   Use direct comparison; check the other places as well.
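   
   For example (sketch only):
   
     && (targetDataType == TSDataType.STRING || targetDataType == TSDataType.TEXT)) {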



##########
integration-test/src/test/java/org/apache/iotdb/relational/it/schema/IoTDBAlterColumnTypeIT.java:
##########
@@ -0,0 +1,2286 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.iotdb.relational.it.schema;
+
+import org.apache.iotdb.commons.utils.MetadataUtils;
+import org.apache.iotdb.db.utils.SchemaUtils;
+import org.apache.iotdb.isession.ISession;
+import org.apache.iotdb.isession.ITableSession;
+import org.apache.iotdb.isession.SessionDataSet;
+import org.apache.iotdb.it.env.EnvFactory;
+import org.apache.iotdb.it.framework.IoTDBTestRunner;
+import org.apache.iotdb.itbase.category.TableClusterIT;
+import org.apache.iotdb.itbase.category.TableLocalStandaloneIT;
+import org.apache.iotdb.rpc.IoTDBConnectionException;
+import org.apache.iotdb.rpc.StatementExecutionException;
+
+import org.apache.tsfile.enums.ColumnCategory;
+import org.apache.tsfile.enums.TSDataType;
+import org.apache.tsfile.exception.write.WriteProcessException;
+import org.apache.tsfile.file.metadata.ColumnSchema;
+import org.apache.tsfile.file.metadata.TableSchema;
+import org.apache.tsfile.file.metadata.enums.CompressionType;
+import org.apache.tsfile.file.metadata.enums.TSEncoding;
+import org.apache.tsfile.read.common.Field;
+import org.apache.tsfile.read.common.Path;
+import org.apache.tsfile.read.common.RowRecord;
+import org.apache.tsfile.utils.Binary;
+import org.apache.tsfile.write.TsFileWriter;
+import org.apache.tsfile.write.record.Tablet;
+import org.apache.tsfile.write.schema.IMeasurementSchema;
+import org.apache.tsfile.write.schema.MeasurementSchema;
+import org.apache.tsfile.write.v4.ITsFileWriter;
+import org.apache.tsfile.write.v4.TsFileWriterBuilder;
+import org.junit.AfterClass;
+import org.junit.Assert;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+import org.junit.runner.RunWith;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.ws.rs.NotSupportedException;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.sql.Connection;
+import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.LinkedHashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Random;
+import java.util.Set;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import static org.apache.iotdb.db.it.utils.TestUtils.prepareData;
+import static org.apache.iotdb.db.it.utils.TestUtils.prepareTableData;
+import static 
org.apache.iotdb.relational.it.session.IoTDBSessionRelationalIT.genValue;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+@SuppressWarnings("ResultOfMethodCallIgnored")
+@RunWith(IoTDBTestRunner.class)
+@Category({TableLocalStandaloneIT.class, TableClusterIT.class})
+public class IoTDBAlterColumnTypeIT {
+
+  private static final Logger log = 
LoggerFactory.getLogger(IoTDBAlterColumnTypeIT.class);
+  private static long timeout = -1;
+
+  @BeforeClass
+  public static void setUp() throws Exception {
+    
EnvFactory.getEnv().getConfig().getDataNodeConfig().setCompactionScheduleInterval(1000);
+    EnvFactory.getEnv().initClusterEnvironment();
+    try (ITableSession session = 
EnvFactory.getEnv().getTableSessionConnection()) {
+      session.executeNonQueryStatement("DROP DATABASE IF EXISTS test");
+    }
+    try (ITableSession session = 
EnvFactory.getEnv().getTableSessionConnection()) {
+      session.executeNonQueryStatement("CREATE DATABASE IF NOT EXISTS test");
+    }
+  }
+
+  @AfterClass
+  public static void tearDown() throws Exception {
+    EnvFactory.getEnv().cleanClusterEnvironment();
+  }
+
+  @Test
+  public void testWriteAndAlter()
+      throws IoTDBConnectionException,
+          StatementExecutionException,
+          IOException,
+          WriteProcessException {
+    Set<TSDataType> typesToTest = new HashSet<>();
+    Collections.addAll(typesToTest, TSDataType.values());
+    typesToTest.remove(TSDataType.VECTOR);
+    typesToTest.remove(TSDataType.UNKNOWN);
+
+    for (TSDataType from : typesToTest) {
+      for (TSDataType to : typesToTest) {
+        if (from != to && to.isCompatible(from)) {
+          System.out.printf("testing %s to %s%n", from, to);
+          doWriteAndAlter(from, to);
+          testAlignDeviceSequenceDataQuery(from, to);
+          testAlignDeviceUnSequenceDataQuery(from, to);
+          testAlignDeviceUnSequenceOverlappedDataQuery(from, to);
+        }
+      }
+    }
+  }
+
+  private void doWriteAndAlter(TSDataType from, TSDataType to)
+      throws IoTDBConnectionException, StatementExecutionException {
+    try (ITableSession session = 
EnvFactory.getEnv().getTableSessionConnectionWithDB("test")) {
+      session.executeNonQueryStatement("SET CONFIGURATION 
enable_unseq_space_compaction='false'");
+      if (from == TSDataType.DATE && !to.isCompatible(from)) {
+        throw new NotSupportedException("Not supported DATE type.");
+      }
+
+      // create a table with type of "from"
+      session.executeNonQueryStatement(
+          "CREATE TABLE IF NOT EXISTS write_and_alter_column_type (s1 " + from 
+ ")");
+
+      // write a sequence tsfile point of "from"
+      Tablet tablet =
+          new Tablet(
+              "write_and_alter_column_type",
+              Collections.singletonList("s1"),
+              Collections.singletonList(from),
+              Collections.singletonList(ColumnCategory.FIELD));
+      tablet.addTimestamp(0, 1);
+      tablet.addValue("s1", 0, genValue(from, 1));
+      session.insert(tablet);
+      tablet.reset();
+      session.executeNonQueryStatement("FLUSH");
+
+      // write an unsequence tsfile point of "from"
+      tablet.addTimestamp(0, 1);
+      tablet.addValue("s1", 0, genValue(from, 1));
+      session.insert(tablet);
+      tablet.reset();
+      session.executeNonQueryStatement("FLUSH");
+
+      // write a sequence memtable point of "from"
+      tablet.addTimestamp(0, 2);
+      tablet.addValue("s1", 0, genValue(from, 2));
+      session.insert(tablet);
+      tablet.reset();
+
+      // write an unsequence memtable point of "from"
+      tablet.addTimestamp(0, 1);
+      tablet.addValue("s1", 0, genValue(from, 1));
+      session.insert(tablet);
+      tablet.reset();
+
+      SessionDataSet dataSet1 =
+          session.executeQueryStatement("select * from 
write_and_alter_column_type order by time");
+      RowRecord rec1;
+      for (int i = 1; i <= 2; i++) {
+        rec1 = dataSet1.next();
+        assertEquals(i, rec1.getFields().get(0).getLongV());
+        //        System.out.println(i + " is " + 
rec1.getFields().get(1).toString());
+      }
+
+      // alter the type to "to"
+      boolean isCompatible = MetadataUtils.canAlter(from, to);
+      if (isCompatible) {
+        session.executeNonQueryStatement(
+            "ALTER TABLE write_and_alter_column_type ALTER COLUMN s1 SET DATA 
TYPE " + to);
+      } else {
+        try {
+          session.executeNonQueryStatement(
+              "ALTER TABLE write_and_alter_column_type ALTER COLUMN s1 SET 
DATA TYPE " + to);
+        } catch (StatementExecutionException e) {
+          assertEquals(
+              "701: New type " + to + " is not compatible with the existing 
one " + from,
+              e.getMessage());
+        }
+      }
+
+      // If don't execute the flush" operation, verify if result can get valid 
value, not be null
+      // when query memtable.
+      //      session.executeNonQueryStatement("FLUSH");
+
+      SessionDataSet dataSet =
+          session.executeQueryStatement(
+              "select time, s1 from write_and_alter_column_type order by 
time");
+      RowRecord rec;
+      TSDataType newType = isCompatible ? to : from;
+      for (int i = 1; i <= 2; i++) {
+        rec = dataSet.next();
+        assertEquals(i, rec.getFields().get(0).getLongV());
+        if (newType == TSDataType.BLOB) {
+          assertEquals(genValue(newType, i), 
rec.getFields().get(1).getBinaryV());
+        } else if (newType == TSDataType.DATE) {
+          assertEquals(genValue(newType, i), 
rec.getFields().get(1).getDateV());
+        } else if (newType == TSDataType.STRING || newType == TSDataType.TEXT) 
{
+          if (from == TSDataType.DATE) {
+            assertEquals(
+                new Binary(genValue(from, i).toString(), 
StandardCharsets.UTF_8),
+                rec.getFields().get(1).getBinaryV());
+          } else {
+            assertEquals(
+                newType.castFromSingleValue(from, genValue(from, i)),
+                rec.getFields().get(1).getBinaryV());
+          }
+        } else {
+          assertEquals(genValue(newType, i).toString(), 
rec.getFields().get(1).toString());
+        }
+      }
+      assertNull(dataSet.next());
+      dataSet.close();
+
+      // write an altered point in sequence and unsequnce tsfile
+      tablet =
+          new Tablet(
+              "write_and_alter_column_type",
+              Collections.singletonList("s1"),
+              Collections.singletonList(newType),
+              Collections.singletonList(ColumnCategory.FIELD));
+      tablet.addTimestamp(0, 3);
+      tablet.addValue("s1", 0, genValue(newType, 3));
+      session.insert(tablet);
+      tablet.reset();
+
+      tablet.addTimestamp(0, 1);
+      tablet.addValue("s1", 0, genValue(newType, 1));
+      session.insert(tablet);
+      tablet.reset();
+      session.executeNonQueryStatement("FLUSH");
+
+      // write an altered point in sequence and unsequnce memtable
+      tablet.addTimestamp(0, 4);
+      tablet.addValue("s1", 0, genValue(newType, 4));
+      session.insert(tablet);
+      tablet.reset();
+
+      tablet.addTimestamp(0, 2);
+      tablet.addValue("s1", 0, genValue(newType, 2));
+      session.insert(tablet);
+      tablet.reset();
+
+      dataSet =
+          session.executeQueryStatement(
+              "select time, s1 from write_and_alter_column_type order by 
time");
+      for (int i = 1; i <= 4; i++) {
+        rec = dataSet.next();
+        assertEquals(i, rec.getFields().get(0).getLongV());
+        if (newType == TSDataType.BLOB) {
+          assertEquals(genValue(newType, i), 
rec.getFields().get(1).getBinaryV());
+        } else if (newType == TSDataType.DATE) {
+          assertEquals(genValue(newType, i), 
rec.getFields().get(1).getDateV());
+        } else if (newType == TSDataType.STRING || newType == TSDataType.TEXT) 
{
+          assertEquals(genValue(to, i), rec.getFields().get(1).getBinaryV());
+        } else {
+          assertEquals(genValue(newType, i).toString(), 
rec.getFields().get(1).toString());
+        }
+      }
+      assertFalse(dataSet.hasNext());
+
+      dataSet =
+          session.executeQueryStatement(
+              "select min(s1),max(s1),first(s1),last(s1) from 
write_and_alter_column_type");
+      rec = dataSet.next();
+      int[] expectedValue = {1, 4, 1, 4};
+      for (int i = 0; i < 4; i++) {
+        if (newType == TSDataType.BLOB) {
+          assertEquals(genValue(newType, expectedValue[i]), 
rec.getFields().get(i).getBinaryV());
+        } else if (newType == TSDataType.DATE) {
+          assertEquals(genValue(newType, expectedValue[i]), 
rec.getFields().get(i).getDateV());
+        } else {
+          assertEquals(
+              genValue(newType, expectedValue[i]).toString(), 
rec.getFields().get(i).toString());
+        }
+      }
+      assertFalse(dataSet.hasNext());
+
+      if (newType.isNumeric()) {
+        dataSet =
+            session.executeQueryStatement(
+                "select avg(s1),sum(s1) from write_and_alter_column_type");
+        rec = dataSet.next();
+        assertEquals(2.5, rec.getFields().get(0).getDoubleV(), 0.001);
+        assertEquals(10.0, rec.getFields().get(1).getDoubleV(), 0.001);
+        assertFalse(dataSet.hasNext());
+      }
+
+    } finally {
+      try (ITableSession session = 
EnvFactory.getEnv().getTableSessionConnectionWithDB("test")) {
+        session.executeNonQueryStatement("DROP TABLE 
write_and_alter_column_type");
+      }
+    }
+  }
+
+  @Test
+  public void testAlterWithoutWrite() throws IoTDBConnectionException, 
StatementExecutionException {
+    Set<TSDataType> typesToTest = new HashSet<>();
+    Collections.addAll(typesToTest, TSDataType.values());
+    typesToTest.remove(TSDataType.VECTOR);
+    typesToTest.remove(TSDataType.UNKNOWN);
+    //    typesToTest.remove(TSDataType.STRING);
+    //    typesToTest.remove(TSDataType.TEXT);
+    //    typesToTest.remove(TSDataType.DATE);
+
+    for (TSDataType from : typesToTest) {
+      for (TSDataType to : typesToTest) {
+        if (from != to && to.isCompatible(from)) {
+          System.out.printf("testing %s to %s%n", from, to);
+          doAlterWithoutWrite(from, to, false);
+          doAlterWithoutWrite(from, to, true);
+        }
+      }
+    }
+  }
+
+  private void doAlterWithoutWrite(TSDataType from, TSDataType to, boolean 
flush)
+      throws IoTDBConnectionException, StatementExecutionException {
+    try (ITableSession session = 
EnvFactory.getEnv().getTableSessionConnectionWithDB("test")) {
+      if (from == TSDataType.DATE && !to.isCompatible(from)) {
+        throw new NotSupportedException("Not supported DATE type.");
+      }
+
+      // create a table with type of "from"
+      session.executeNonQueryStatement(
+          "CREATE TABLE IF NOT EXISTS just_alter_column_type (s1 " + from + 
")");
+
+      // alter the type to "to"
+      boolean isCompatible = MetadataUtils.canAlter(from, to);
+      if (isCompatible) {
+        session.executeNonQueryStatement(
+            "ALTER TABLE just_alter_column_type ALTER COLUMN s1 SET DATA TYPE 
" + to);
+      } else {
+        try {
+          session.executeNonQueryStatement(
+              "ALTER TABLE just_alter_column_type ALTER COLUMN s1 SET DATA 
TYPE " + to);
+        } catch (StatementExecutionException e) {
+          assertEquals(
+              "701: New type " + to + " is not compatible with the existing 
one " + from,
+              e.getMessage());
+        }
+      }
+
+      TSDataType newType = isCompatible ? to : from;
+
+      // write a point
+      Tablet tablet =
+          new Tablet(
+              "just_alter_column_type",
+              Collections.singletonList("s1"),
+              Collections.singletonList(newType),
+              Collections.singletonList(ColumnCategory.FIELD));
+      tablet.addTimestamp(0, 1);
+      tablet.addValue("s1", 0, genValue(newType, 1));
+      session.insert(tablet);
+      tablet.reset();
+
+      if (flush) {
+        session.executeNonQueryStatement("FLUSH");
+      }
+
+      SessionDataSet dataSet =
+          session.executeQueryStatement("select * from just_alter_column_type 
order by time");
+      RowRecord rec = dataSet.next();
+      assertEquals(1, rec.getFields().get(0).getLongV());
+      if (newType == TSDataType.BLOB) {
+        assertEquals(genValue(newType, 1), 
rec.getFields().get(1).getBinaryV());
+      } else if (newType == TSDataType.DATE) {
+        assertEquals(genValue(newType, 1), rec.getFields().get(1).getDateV());
+      } else {
+        assertEquals(genValue(newType, 1).toString(), 
rec.getFields().get(1).toString());
+      }
+
+      assertFalse(dataSet.hasNext());
+
+      session.executeNonQueryStatement("DROP TABLE just_alter_column_type");
+    }
+  }
+
+  @Test
+  public void testAlterNonExist() throws IoTDBConnectionException, 
StatementExecutionException {
+    try (ITableSession session = 
EnvFactory.getEnv().getTableSessionConnectionWithDB("test")) {
+      try {
+        session.executeNonQueryStatement(
+            "ALTER TABLE non_exist ALTER COLUMN s1 SET DATA TYPE INT64");
+        fail("Should throw exception");
+      } catch (StatementExecutionException e) {
+        assertEquals("550: Table 'test.non_exist' does not exist", 
e.getMessage());
+      }
+      session.executeNonQueryStatement(
+          "ALTER TABLE IF EXISTS non_exist ALTER COLUMN s1 SET DATA TYPE 
INT64");
+
+      session.executeNonQueryStatement("CREATE TABLE IF NOT EXISTS non_exist 
(s1 int32)");
+
+      try {
+        session.executeNonQueryStatement(
+            "ALTER TABLE non_exist ALTER COLUMN s2 SET DATA TYPE INT64");
+        fail("Should throw exception");
+      } catch (StatementExecutionException e) {
+        assertEquals("616: Column s2 in table 'test.non_exist' does not 
exist.", e.getMessage());
+      }
+      session.executeNonQueryStatement(
+          "ALTER TABLE non_exist ALTER COLUMN IF EXISTS s2 SET DATA TYPE 
INT64");
+    }
+  }
+
+  @Test
+  public void testAlterWrongType() throws IoTDBConnectionException, 
StatementExecutionException {
+    try (ITableSession session = 
EnvFactory.getEnv().getTableSessionConnectionWithDB("test")) {
+      session.executeNonQueryStatement("CREATE TABLE IF NOT EXISTS wrong_type 
(s1 int32)");
+
+      try {
+        session.executeNonQueryStatement(
+            "ALTER TABLE wrong_type ALTER COLUMN s1 SET DATA TYPE VECTOR");
+        fail("Should throw exception");
+      } catch (StatementExecutionException e) {
+        assertEquals("701: Unknown type: VECTOR", e.getMessage());
+      }
+    }
+  }
+
+  @Test
+  public void testDropAndAlter() throws IoTDBConnectionException, 
StatementExecutionException {
+    try (ITableSession session = 
EnvFactory.getEnv().getTableSessionConnectionWithDB("test")) {
+      session.executeNonQueryStatement("SET CONFIGURATION 
enable_unseq_space_compaction='false'");
+      session.executeNonQueryStatement("SET CONFIGURATION 
enable_seq_space_compaction='false'");
+      session.executeNonQueryStatement("CREATE TABLE IF NOT EXISTS 
drop_and_alter (s1 int32)");
+
+      // time=1 and time=2 are INT32 and deleted by drop column
+      Tablet tablet =
+          new Tablet(
+              "drop_and_alter",
+              Collections.singletonList("s1"),
+              Collections.singletonList(TSDataType.INT32),
+              Collections.singletonList(ColumnCategory.FIELD));
+      tablet.addTimestamp(0, 1);
+      tablet.addValue("s1", 0, genValue(TSDataType.INT32, 1));
+      session.insert(tablet);
+      tablet.reset();
+
+      session.executeNonQueryStatement("FLUSH");
+
+      tablet =
+          new Tablet(
+              "drop_and_alter",
+              Collections.singletonList("s1"),
+              Collections.singletonList(TSDataType.INT32),
+              Collections.singletonList(ColumnCategory.FIELD));
+      tablet.addTimestamp(0, 2);
+      tablet.addValue("s1", 0, genValue(TSDataType.INT32, 2));
+      session.insert(tablet);
+      tablet.reset();
+
+      session.executeNonQueryStatement("ALTER TABLE drop_and_alter DROP COLUMN 
s1");
+
+      // time=3 and time=4 are STRING
+      tablet =
+          new Tablet(
+              "drop_and_alter",
+              Collections.singletonList("s1"),
+              Collections.singletonList(TSDataType.STRING),
+              Collections.singletonList(ColumnCategory.FIELD));
+      tablet.addTimestamp(0, 3);
+      tablet.addValue("s1", 0, genValue(TSDataType.STRING, 3));
+      session.insert(tablet);
+      tablet.reset();
+
+      session.executeNonQueryStatement("FLUSH");
+
+      tablet =
+          new Tablet(
+              "drop_and_alter",
+              Collections.singletonList("s1"),
+              Collections.singletonList(TSDataType.STRING),
+              Collections.singletonList(ColumnCategory.FIELD));
+      tablet.addTimestamp(0, 4);
+      tablet.addValue("s1", 0, genValue(TSDataType.STRING, 4));
+      session.insert(tablet);
+      tablet.reset();
+
+      session.executeNonQueryStatement(
+          "ALTER TABLE drop_and_alter ALTER COLUMN s1 SET DATA TYPE TEXT");
+
+      // time=5 and time=6 are TEXT
+      tablet =
+          new Tablet(
+              "drop_and_alter",
+              Collections.singletonList("s1"),
+              Collections.singletonList(TSDataType.TEXT),
+              Collections.singletonList(ColumnCategory.FIELD));
+      tablet.addTimestamp(0, 5);
+      tablet.addValue("s1", 0, genValue(TSDataType.STRING, 5));
+      session.insert(tablet);
+      tablet.reset();
+
+      session.executeNonQueryStatement("FLUSH");
+
+      tablet =
+          new Tablet(
+              "drop_and_alter",
+              Collections.singletonList("s1"),
+              Collections.singletonList(TSDataType.TEXT),
+              Collections.singletonList(ColumnCategory.FIELD));
+      tablet.addTimestamp(0, 6);
+      tablet.addValue("s1", 0, genValue(TSDataType.STRING, 6));
+      session.insert(tablet);
+      tablet.reset();
+
+      SessionDataSet dataSet =
+          session.executeQueryStatement("select * from drop_and_alter order by 
time");
+      // s1 is dropped but the time should remain
+      RowRecord rec;
+      for (int i = 1; i < 3; i++) {
+        rec = dataSet.next();
+        assertEquals(i, rec.getFields().get(0).getLongV());
+        // the dropped s1 field is expected to carry no value for these rows; only the timestamp is asserted
+      }
+      for (int i = 3; i < 7; i++) {
+        rec = dataSet.next();
+        assertEquals(i, rec.getFields().get(0).getLongV());
+        assertEquals(genValue(TSDataType.STRING, i).toString(), 
rec.getFields().get(1).toString());
+      }
+      assertFalse(dataSet.hasNext());
+    }
+  }
+
+  @Test
+  public void testContinuousAlter() throws IoTDBConnectionException, 
StatementExecutionException {
+    try (ITableSession session = 
EnvFactory.getEnv().getTableSessionConnectionWithDB("test")) {
+      session.executeNonQueryStatement("CREATE TABLE IF NOT EXISTS 
alter_and_alter (s1 int32)");
+
+      // time=1 and time=2 are INT32
+      Tablet tablet =
+          new Tablet(
+              "alter_and_alter",
+              Collections.singletonList("s1"),
+              Collections.singletonList(TSDataType.INT32),
+              Collections.singletonList(ColumnCategory.FIELD));
+      tablet.addTimestamp(0, 1);
+      tablet.addValue("s1", 0, genValue(TSDataType.INT32, 1));
+      session.insert(tablet);
+      tablet.reset();
+
+      session.executeNonQueryStatement("FLUSH");
+
+      tablet =
+          new Tablet(
+              "alter_and_alter",
+              Collections.singletonList("s1"),
+              Collections.singletonList(TSDataType.INT32),
+              Collections.singletonList(ColumnCategory.FIELD));
+      tablet.addTimestamp(0, 2);
+      tablet.addValue("s1", 0, genValue(TSDataType.INT32, 2));
+      session.insert(tablet);
+      tablet.reset();
+
+      // time=3 and time=4 are FLOAT
+      session.executeNonQueryStatement(
+          "ALTER TABLE alter_and_alter ALTER COLUMN s1 SET DATA TYPE FLOAT");
+      tablet =
+          new Tablet(
+              "alter_and_alter",
+              Collections.singletonList("s1"),
+              Collections.singletonList(TSDataType.FLOAT),
+              Collections.singletonList(ColumnCategory.FIELD));
+      tablet.addTimestamp(0, 3);
+      tablet.addValue("s1", 0, genValue(TSDataType.FLOAT, 3));
+      session.insert(tablet);
+      tablet.reset();
+
+      session.executeNonQueryStatement("FLUSH");
+
+      tablet =
+          new Tablet(
+              "alter_and_alter",
+              Collections.singletonList("s1"),
+              Collections.singletonList(TSDataType.FLOAT),
+              Collections.singletonList(ColumnCategory.FIELD));
+      tablet.addTimestamp(0, 4);
+      tablet.addValue("s1", 0, genValue(TSDataType.FLOAT, 4));
+      session.insert(tablet);
+      tablet.reset();
+
+      // time=5 and time=6 are DOUBLE
+      session.executeNonQueryStatement(
+          "ALTER TABLE alter_and_alter ALTER COLUMN s1 SET DATA TYPE DOUBLE");
+      tablet =
+          new Tablet(
+              "alter_and_alter",
+              Collections.singletonList("s1"),
+              Collections.singletonList(TSDataType.DOUBLE),
+              Collections.singletonList(ColumnCategory.FIELD));
+      tablet.addTimestamp(0, 5);
+      tablet.addValue("s1", 0, genValue(TSDataType.DOUBLE, 5));
+      session.insert(tablet);
+      tablet.reset();
+
+      session.executeNonQueryStatement("FLUSH");
+
+      tablet =
+          new Tablet(
+              "alter_and_alter",
+              Collections.singletonList("s1"),
+              Collections.singletonList(TSDataType.DOUBLE),
+              Collections.singletonList(ColumnCategory.FIELD));
+      tablet.addTimestamp(0, 6);
+      tablet.addValue("s1", 0, genValue(TSDataType.DOUBLE, 6));
+      session.insert(tablet);
+      tablet.reset();
+
+      SessionDataSet dataSet =
+          session.executeQueryStatement("select * from alter_and_alter order 
by time");
+      RowRecord rec;
+      for (int i = 1; i < 7; i++) {
+        rec = dataSet.next();
+        assertEquals(i, rec.getFields().get(0).getLongV());
+        assertEquals(genValue(TSDataType.DOUBLE, i).toString(), 
rec.getFields().get(1).toString());
+      }
+      assertFalse(dataSet.hasNext());
+    }
+  }
+
+  @Test
+  public void testConcurrentWriteAndAlter()
+      throws IoTDBConnectionException, StatementExecutionException, 
InterruptedException {
+    try (ITableSession session = 
EnvFactory.getEnv().getTableSessionConnectionWithDB("test")) {
+      session.executeNonQueryStatement(
+          "CREATE TABLE IF NOT EXISTS concurrent_write_and_alter (s1 int32)");
+    }
+
+    ExecutorService threadPool = Executors.newCachedThreadPool();
+    AtomicInteger writeCounter = new AtomicInteger(0);
+    int maxWrite = 10000;
+    int flushInterval = 100;
+    int alterStart = 5000;
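+    // the ALTER statement is issued only after at least alterStart rows have been written,
+    // so the type change races with the remaining writes and flushes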
+    threadPool.submit(
+        () -> {
+          try {
+            write(writeCounter, maxWrite, flushInterval);
+          } catch (IoTDBConnectionException | StatementExecutionException e) {
+            throw new RuntimeException(e);
+          }
+        });
+    threadPool.submit(
+        () -> {
+          try {
+            alter(writeCounter, alterStart);
+          } catch (InterruptedException
+              | IoTDBConnectionException
+              | StatementExecutionException e) {
+            throw new RuntimeException(e);
+          }
+        });
+    threadPool.shutdown();
+    assertTrue(threadPool.awaitTermination(1, TimeUnit.MINUTES));
+
+    try (ITableSession session = 
EnvFactory.getEnv().getTableSessionConnectionWithDB("test")) {
+      SessionDataSet dataSet =
+          session.executeQueryStatement("select count(s1) from 
concurrent_write_and_alter");
+      RowRecord rec;
+      rec = dataSet.next();
+      assertEquals(maxWrite, rec.getFields().get(0).getLongV());
+      assertFalse(dataSet.hasNext());
+    }
+  }
+
+  private void write(AtomicInteger writeCounter, int maxWrite, int 
flushInterval)
+      throws IoTDBConnectionException, StatementExecutionException {
+    try (ITableSession session = 
EnvFactory.getEnv().getTableSessionConnectionWithDB("test")) {
+      int writtenCnt = 0;
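+      // insert one row per iteration, flushing every flushInterval rows, until maxWrite rows have been written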
+      do {
+        session.executeNonQueryStatement(
+            String.format(
+                "INSERT INTO concurrent_write_and_alter (time, s1) VALUES (%d, 
%d)",
+                writtenCnt, writtenCnt));
+        if (((writtenCnt + 1) % flushInterval) == 0) {
+          session.executeNonQueryStatement("FLUSH");
+        }
+      } while ((writtenCnt = writeCounter.incrementAndGet()) < maxWrite);
+    }
+  }
+
+  private void alter(AtomicInteger writeCounter, int alterStart)
+      throws InterruptedException, IoTDBConnectionException, 
StatementExecutionException {
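+    // busy-wait until the writer has inserted at least alterStart rows, then change the column type while writes continue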
+    while (writeCounter.get() < alterStart) {
+      Thread.sleep(10);
+    }
+    try (ITableSession session = 
EnvFactory.getEnv().getTableSessionConnectionWithDB("test")) {
+      session.executeNonQueryStatement(
+          "ALTER TABLE concurrent_write_and_alter ALTER COLUMN s1 SET DATA 
TYPE DOUBLE");
+    }
+  }
+
+  @Test
+  public void testLoadAndAlter()
+      throws IoTDBConnectionException,
+          StatementExecutionException,
+          IOException,
+          WriteProcessException {
+    try (ITableSession session = 
EnvFactory.getEnv().getTableSessionConnectionWithDB("test")) {
+      session.executeNonQueryStatement("SET CONFIGURATION 
enable_unseq_space_compaction='false'");
+    }
+
+    // file1-file4 s1=INT32
+    TableSchema schema1 =
+        new TableSchema(
+            "load_and_alter",
+            Arrays.asList(
+                new ColumnSchema("dId", TSDataType.STRING, ColumnCategory.TAG),
+                new ColumnSchema("s1", TSDataType.INT32, 
ColumnCategory.FIELD)));
+    // file1-file3: single device, small time range ([1, 1]); may be loaded without splitting
+    List<File> filesToLoad = new ArrayList<>();
+    for (int i = 1; i <= 3; i++) {
+      File file = new File("target", "f" + i + ".tsfile");
+      try (ITsFileWriter tsFileWriter =
+          new TsFileWriterBuilder().file(file).tableSchema(schema1).build()) {
+        Tablet tablet =
+            new Tablet(
+                schema1.getTableName(),
+                Arrays.asList("dId", "s1"),
+                Arrays.asList(TSDataType.STRING, TSDataType.INT32),
+                Arrays.asList(ColumnCategory.TAG, ColumnCategory.FIELD));
+        tablet.addTimestamp(0, 1);
+        tablet.addValue("dId", 0, "d" + i);
+        tablet.addValue("s1", 0, 1);
+        tsFileWriter.write(tablet);
+      }
+      filesToLoad.add(file);
+    }
+    // file4: multiple devices, large time range ([2, 100_000_000]); loaded with splitting
+    File file = new File("target", "f" + 4 + ".tsfile");
+    try (ITsFileWriter tsFileWriter =
+        new TsFileWriterBuilder().file(file).tableSchema(schema1).build()) {
+      Tablet tablet =
+          new Tablet(
+              schema1.getTableName(),
+              Arrays.asList("dId", "s1"),
+              Arrays.asList(TSDataType.STRING, TSDataType.INT32),
+              Arrays.asList(ColumnCategory.TAG, ColumnCategory.FIELD));
+      int rowIndex = 0;
+      for (int i = 1; i <= 3; i++) {
+        tablet.addTimestamp(rowIndex, 2);
+        tablet.addValue("dId", rowIndex, "d" + i);
+        tablet.addValue("s1", rowIndex, 2);
+        rowIndex++;
+        tablet.addTimestamp(rowIndex, 100_000_000);
+        tablet.addValue("dId", rowIndex, "d" + i);
+        tablet.addValue("s1", rowIndex, 100_000_000);
+        rowIndex++;
+      }
+      tsFileWriter.write(tablet);
+    }
+    filesToLoad.add(file);
+
+    // load file1-file4
+    try (ITableSession session = 
EnvFactory.getEnv().getTableSessionConnectionWithDB("test")) {
+      for (File f : filesToLoad) {
+        session.executeNonQueryStatement("LOAD '" + f.getAbsolutePath() + "'");
+      }
+    }
+    // check load result
+    try (ITableSession session = 
EnvFactory.getEnv().getTableSessionConnectionWithDB("test")) {
+      SessionDataSet dataSet =
+          session.executeQueryStatement("select count(s1) from 
load_and_alter");
+      RowRecord rec;
+      rec = dataSet.next();
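+      // 9 rows in total: one row in each of file1-file3 plus six rows (3 devices x 2 timestamps) in file4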
+      assertEquals(9, rec.getFields().get(0).getLongV());
+      assertFalse(dataSet.hasNext());
+    }
+
+    filesToLoad.forEach(
+        tsfile -> {
+          tsfile.delete();
+          File resourceFile = new File(tsfile.getAbsolutePath() + ".resource");
+          resourceFile.delete();
+        });
+    filesToLoad.clear();
+
+    // file5-file8 s1=DOUBLE
+    TableSchema schema2 =
+        new TableSchema(
+            "load_and_alter",
+            Arrays.asList(
+                new ColumnSchema("dId", TSDataType.STRING, ColumnCategory.TAG),
+                new ColumnSchema("s1", TSDataType.DOUBLE, 
ColumnCategory.FIELD)));
+    // file5-file7: single device, small time range ([3, 3]); may be loaded without splitting
+    for (int i = 5; i <= 7; i++) {
+      file = new File("target", "f" + i + ".tsfile");
+      try (ITsFileWriter tsFileWriter =
+          new TsFileWriterBuilder().file(file).tableSchema(schema2).build()) {
+        Tablet tablet =
+            new Tablet(
+                schema2.getTableName(),
+                Arrays.asList("dId", "s1"),
+                Arrays.asList(TSDataType.STRING, TSDataType.DOUBLE),
+                Arrays.asList(ColumnCategory.TAG, ColumnCategory.FIELD));
+        tablet.addTimestamp(0, 3);
+        tablet.addValue("dId", 0, "d" + i);
+        tablet.addValue("s1", 0, 3.0);
+        tsFileWriter.write(tablet);
+      }
+      filesToLoad.add(file);
+    }
+    // file8: multiple devices, large time range ([4, 100_000_001]); loaded with splitting
+    file = new File("target", "f" + 8 + ".tsfile");
+    try (ITsFileWriter tsFileWriter =
+        new TsFileWriterBuilder().file(file).tableSchema(schema2).build()) {
+      Tablet tablet =
+          new Tablet(
+              schema2.getTableName(),
+              Arrays.asList("dId", "s1"),
+              Arrays.asList(TSDataType.STRING, TSDataType.DOUBLE),
+              Arrays.asList(ColumnCategory.TAG, ColumnCategory.FIELD));
+      int rowIndex = 0;
+      for (int i = 1; i <= 3; i++) {
+        tablet.addTimestamp(rowIndex, 4);
+        tablet.addValue("dId", rowIndex, "d" + i);
+        tablet.addValue("s1", rowIndex, 4.0);
+        rowIndex++;
+        tablet.addTimestamp(rowIndex, 100_000_001);
+        tablet.addValue("dId", rowIndex, "d" + i);
+        tablet.addValue("s1", rowIndex, 100_000_001.0);
+        rowIndex++;
+      }
+      tsFileWriter.write(tablet);
+    }
+    filesToLoad.add(file);
+
+    // load file5-file8
+    try (ITableSession session = 
EnvFactory.getEnv().getTableSessionConnectionWithDB("test")) {
+      for (File f : filesToLoad) {
+        session.executeNonQueryStatement("LOAD '" + f.getAbsolutePath() + "'");
+      }
+    }
+    // check load result
+    try (ITableSession session = 
EnvFactory.getEnv().getTableSessionConnectionWithDB("test")) {
+      SessionDataSet dataSet =
+          session.executeQueryStatement("select count(s1) from 
load_and_alter");
+      RowRecord rec;
+      rec = dataSet.next();
+      // LOAD writes the TsFile directly, bypassing the memtable and InsertNode generation,
+      // so the column data type is not checked during ingestion.
+      // Queries on this measurement only see the INT32 data, so the DOUBLE rows loaded here
+      // are not counted: the result is 9 instead of 18.
+      assertEquals(9, rec.getFields().get(0).getLongV());
+      assertFalse(dataSet.hasNext());
+    }
+
+    // alter s1 to double
+    try (ITableSession session = 
EnvFactory.getEnv().getTableSessionConnectionWithDB("test")) {
+      session.executeNonQueryStatement(
+          "ALTER TABLE load_and_alter ALTER COLUMN s1 SET DATA TYPE DOUBLE");
+    }
+
+    filesToLoad.forEach(
+        tsfile -> {
+          tsfile.delete();
+          File resourceFile = new File(tsfile.getAbsolutePath() + ".resource");
+          resourceFile.delete();
+        });
+    filesToLoad.clear();
+
+    // file9-file12 s1=INT32
+    // file9-file11: single device, small time range ([5, 5]); may be loaded without splitting
+    for (int i = 9; i <= 11; i++) {
+      file = new File("target", "f" + i + ".tsfile");
+      try (ITsFileWriter tsFileWriter =
+          new TsFileWriterBuilder().file(file).tableSchema(schema1).build()) {
+        Tablet tablet =
+            new Tablet(
+                schema1.getTableName(),
+                Arrays.asList("dId", "s1"),
+                Arrays.asList(TSDataType.STRING, TSDataType.INT32),
+                Arrays.asList(ColumnCategory.TAG, ColumnCategory.FIELD));
+        tablet.addTimestamp(0, 5);
+        tablet.addValue("dId", 0, "d" + i);
+        tablet.addValue("s1", 0, 5);
+        tsFileWriter.write(tablet);
+      }
+      filesToLoad.add(file);
+    }
+    // file12: multiple devices, large time range ([6, 100_000_002]); loaded with splitting
+    file = new File("target", "f" + 12 + ".tsfile");
+    try (ITsFileWriter tsFileWriter =
+        new TsFileWriterBuilder().file(file).tableSchema(schema1).build()) {
+      Tablet tablet =
+          new Tablet(
+              schema1.getTableName(),
+              Arrays.asList("dId", "s1"),
+              Arrays.asList(TSDataType.STRING, TSDataType.INT32),
+              Arrays.asList(ColumnCategory.TAG, ColumnCategory.FIELD));
+      int rowIndex = 0;
+      for (int i = 1; i <= 3; i++) {
+        tablet.addTimestamp(rowIndex, 6);
+        tablet.addValue("dId", rowIndex, "d" + i);
+        tablet.addValue("s1", rowIndex, 6);
+        rowIndex++;
+        tablet.addTimestamp(rowIndex, 100_000_002);
+        tablet.addValue("dId", rowIndex, "d" + i);
+        tablet.addValue("s1", rowIndex, 100_000_002);
+        rowIndex++;
+      }
+      tsFileWriter.write(tablet);
+    }
+    filesToLoad.add(file);
+
+    // load file9-file12, should succeed
+    try (ITableSession session = 
EnvFactory.getEnv().getTableSessionConnectionWithDB("test")) {
+      for (File f : filesToLoad) {
+        session.executeNonQueryStatement("LOAD '" + f.getAbsolutePath() + "'");
+      }
+    }
+    // check load result
+    try (ITableSession session = 
EnvFactory.getEnv().getTableSessionConnectionWithDB("test")) {
+      SessionDataSet dataSet =
+          session.executeQueryStatement("select count(s1) from 
load_and_alter");
+      RowRecord rec;
+      rec = dataSet.next();
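+      // 27 rows: with the column altered to DOUBLE, all three load batches of 9 rows each are visible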
+      assertEquals(27, rec.getFields().get(0).getLongV());
+      assertFalse(dataSet.hasNext());
+    }
+
+    filesToLoad.forEach(
+        tsfile -> {
+          tsfile.delete();
+          File resourceFile = new File(tsfile.getAbsolutePath() + ".resource");
+          resourceFile.delete();
+        });
+    filesToLoad.clear();
+  }
+
+  @Test
+  public void testAlterViewType() throws IoTDBConnectionException, 
StatementExecutionException {
+    String[] createTreeDataSqls = {
+      "CREATE ALIGNED TIMESERIES root.db.battery.b0(voltage INT32, current 
FLOAT)",
+      "INSERT INTO root.db.battery.b0(time, voltage, current) aligned values ("
+          + (System.currentTimeMillis() - 100000)
+          + ", 1, 1)",
+      "CREATE ALIGNED TIMESERIES root.db.battery.b1(voltage INT32, current 
FLOAT)",
+      "INSERT INTO root.db.battery.b1(time, voltage, current) aligned values 
(1, 1, 1)",
+      "INSERT INTO root.db.battery.b1(time, voltage, current) aligned values 
(2, 1, 1)",
+      "INSERT INTO root.db.battery.b1(time, voltage, current) aligned values 
(3, 1, 1)",
+      "INSERT INTO root.db.battery.b1(time, voltage, current) aligned values 
(4, 1, 1)",
+      "INSERT INTO root.db.battery.b1(time, voltage, current) aligned values ("
+          + System.currentTimeMillis()
+          + ", 1, 1)",
+      "CREATE TIMESERIES root.db.battery.b2.voltage INT32",
+      "CREATE TIMESERIES root.db.battery.b2.current FLOAT",

Review Comment:
   Do not put tree model tests in this package.



##########
iotdb-core/datanode/src/main/java/org/apache/iotdb/db/utils/datastructure/AlignedTVList.java:
##########
@@ -1026,12 +1027,14 @@ private void markNullValue(
     }
   }
 
-  private void markNullValue(int columnIndex, int arrayIndex, int 
elementIndex) {
+  private boolean markNullValue(int columnIndex, int arrayIndex, int 
elementIndex) {
     // mark the null value in the current bitmap
-    if (bitMaps.get(columnIndex).get(arrayIndex).isMarked(elementIndex)) {
+    BitMap bitMap = getBitMap(columnIndex, arrayIndex);
+    bitMap.mark(elementIndex);
+    if (bitMap.isMarked(elementIndex)) {

Review Comment:
   Check this: `bitMap.mark(elementIndex)` now runs before the `isMarked` check, so the condition is always true for this element. If the boolean is meant to report whether the value was already marked as null, the bitmap state should be read before marking.
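   
   A minimal sketch of the concern, assuming the return value is intended to report whether the slot was already marked as null before this call (`getBitMap` and `BitMap` come from the diff above; the return semantics are my assumption, not something the PR states):
   
   ```java
   private boolean markNullValue(int columnIndex, int arrayIndex, int elementIndex) {
     BitMap bitMap = getBitMap(columnIndex, arrayIndex);
     // read the state before marking: once mark() has run, isMarked() always returns true
     boolean wasAlreadyMarked = bitMap.isMarked(elementIndex);
     bitMap.mark(elementIndex);
     return wasAlreadyMarked;
   }
   ```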



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]
