This is an automated email from the ASF dual-hosted git repository.

haonan pushed a commit to branch query_v2_py
in repository https://gitbox.apache.org/repos/asf/iotdb.git

commit ff68bc14ddb953048f6e9efd0d833cdcc01e2cc7
Author: HTHou <[email protected]>
AuthorDate: Thu Mar 20 11:58:21 2025 +0800

    dev
---
 iotdb-client/client-py/iotdb/Session.py            |   6 +-
 .../client-py/iotdb/tsfile/utils/tsblock_serde.py  |  10 +-
 .../client-py/iotdb/utils/IoTDBRpcDataSet.py       |  68 ++--
 .../client-py/iotdb/utils/SessionDataSet.py        |   4 +-
 iotdb-client/client-py/session_example.py          | 367 +--------------------
 5 files changed, 48 insertions(+), 407 deletions(-)

diff --git a/iotdb-client/client-py/iotdb/Session.py b/iotdb-client/client-py/iotdb/Session.py
index 4925aed9371..2ab2428a213 100644
--- a/iotdb-client/client-py/iotdb/Session.py
+++ b/iotdb-client/client-py/iotdb/Session.py
@@ -1483,13 +1483,13 @@ class Session(object):
             self.__session_id, sql, self.__statement_id, self.__fetch_size, 
timeout
         )
         try:
-            resp = self.__client.executeQueryStatement(request)
+            resp = self.__client.executeQueryStatementV2(request)
         except TTransport.TException as e:
             if self.reconnect():
                 try:
                     request.sessionId = self.__session_id
                     request.statementId = self.__statement_id
-                    resp = self.__client.executeQueryStatement(request)
+                    resp = self.__client.executeQueryStatementV2(request)
                 except TTransport.TException as e1:
                     raise IoTDBConnectionException(e1) from None
             else:
@@ -1505,7 +1505,7 @@ class Session(object):
             self.__client,
             self.__statement_id,
             self.__session_id,
-            resp.queryDataSet,
+            resp.queryResult,
             resp.ignoreTimeStamp,
         )
 
diff --git a/iotdb-client/client-py/iotdb/tsfile/utils/tsblock_serde.py b/iotdb-client/client-py/iotdb/tsfile/utils/tsblock_serde.py
index cfcf37a18fb..be86c26666c 100644
--- a/iotdb-client/client-py/iotdb/tsfile/utils/tsblock_serde.py
+++ b/iotdb-client/client-py/iotdb/tsfile/utils/tsblock_serde.py
@@ -110,7 +110,7 @@ def convert_to_df(name_list, type_list, name_index, binary_list):
             time_column_values, np.dtype(np.longlong).newbyteorder(">")
         )
         if time_array.dtype.byteorder == ">":
-            time_array = time_array.byteswap().view(time_array.dtype.newbyteorder("<"))
+            time_array = time_array.byteswap().newbyteorder("<")
 
         if result[TIMESTAMP_STR] is None:
             result[TIMESTAMP_STR] = time_array
@@ -165,9 +165,7 @@ def convert_to_df(name_list, type_list, name_index, binary_list):
                 raise RuntimeError("unsupported data type 
{}.".format(data_type))
 
             if data_array.dtype.byteorder == ">":
-                data_array = data_array.byteswap().view(
-                    data_array.dtype.newbyteorder("<")
-                )
+                data_array = data_array.byteswap().newbyteorder("<")
 
             null_indicator = null_indicators[location]
             if len(data_array) < total_length or (
@@ -274,8 +272,8 @@ def deserialize(buffer):
     column_encodings, buffer = read_column_encoding(buffer, value_column_count 
+ 1)
 
     time_column_values, buffer = read_time_column(buffer, position_count)
-    column_values = [] * value_column_count
-    null_indicators = [] * value_column_count
+    column_values = [None] * value_column_count
+    null_indicators = [None] * value_column_count
     for i in range(value_column_count):
         column_value, null_indicator, buffer = read_column(
             column_encodings[i + 1], buffer, data_types[i], position_count
diff --git a/iotdb-client/client-py/iotdb/utils/IoTDBRpcDataSet.py b/iotdb-client/client-py/iotdb/utils/IoTDBRpcDataSet.py
index f486fc5cd63..7d4c17907bd 100644
--- a/iotdb-client/client-py/iotdb/utils/IoTDBRpcDataSet.py
+++ b/iotdb-client/client-py/iotdb/utils/IoTDBRpcDataSet.py
@@ -25,6 +25,7 @@ import pandas as pd
 from thrift.transport import TTransport
 from iotdb.thrift.rpc.IClientRPCService import TSFetchResultsReq, TSCloseOperationReq
 from iotdb.tsfile.utils.date_utils import parse_int_to_date
+from iotdb.tsfile.utils.tsblock_serde import deserialize
 from iotdb.utils.IoTDBConnectionException import IoTDBConnectionException
 from iotdb.utils.IoTDBConstants import TSDataType
 
@@ -51,7 +52,7 @@ class IoTDBRpcDataSet(object):
         client,
         statement_id,
         session_id,
-        query_data_set,
+        query_result,
         fetch_size,
     ):
         self.__statement_id = statement_id
@@ -99,7 +100,9 @@ class IoTDBRpcDataSet(object):
                     self.column_type_deduplicated_list.append(
                         TSDataType[column_type_list[i]]
                     )
-        self.__query_data_set = query_data_set
+        self.__query_result = query_result
+        if query_result is not None:
+            self.__query_result_size = len(query_result)
         self.__is_closed = False
         self.__empty_resultSet = False
         self.__rows_index = 0
@@ -144,13 +147,19 @@ class IoTDBRpcDataSet(object):
     def construct_one_data_frame(self):
         if (
             self.has_cached_data_frame
-            or self.__query_data_set is None
-            or len(self.__query_data_set.time) == 0
+            or self.__query_result is None
+            or len(self.__query_result) == 0
         ):
             return
+        binary_size = len(self.__query_result)
+        binary_index = 0
         result = {}
+        time_column_values, column_values, null_indicators, _ = deserialize(
+            self.__query_result[0]
+        )
+        self.__query_result = None
         time_array = np.frombuffer(
-            self.__query_data_set.time, np.dtype(np.longlong).newbyteorder(">")
+            time_column_values, np.dtype(np.longlong).newbyteorder(">")
         )
         if time_array.dtype.byteorder == ">":
             time_array = time_array.byteswap().view(time_array.dtype.newbyteorder("<"))
@@ -168,7 +177,7 @@ class IoTDBRpcDataSet(object):
             if location < 0:
                 continue
             data_type = self.column_type_deduplicated_list[location]
-            value_buffer = self.__query_data_set.valueList[location]
+            value_buffer = column_values[location]
             value_buffer_len = len(value_buffer)
             # DOUBLE
             if data_type == 4:
@@ -216,15 +225,14 @@ class IoTDBRpcDataSet(object):
                 data_array = data_array.byteswap().view(
                     data_array.dtype.newbyteorder("<")
                 )
+
+            null_indicator = null_indicators[location]
+
             if len(data_array) < total_length:
                 tmp_array = np.full(total_length, None, dtype=object)
-
-                bitmap_buffer = self.__query_data_set.bitmapList[location]
-                buffer = _to_bitbuffer(bitmap_buffer)
-                bit_mask = (np.frombuffer(buffer, "u1") - 
ord("0")).astype(bool)
-                if len(bit_mask) != total_length:
-                    bit_mask = bit_mask[:total_length]
-                tmp_array[bit_mask] = data_array
+                if null_indicator is not None:
+                    indexes = [not v for v in null_indicator]
+                    tmp_array[indexes] = data_array
 
                 # INT32, DATE
                 if data_type == 1 or data_type == 9:
@@ -238,8 +246,8 @@ class IoTDBRpcDataSet(object):
                 data_array = tmp_array
 
             result[i + 1] = data_array
-        self.__query_data_set = None
-        self.data_frame = pd.DataFrame(result, dtype=object)
+        self.data_frame = pd.DataFrame(result)
+        self.__query_result = None
         if not self.data_frame.empty:
             self.has_cached_data_frame = True
 
@@ -247,9 +255,7 @@ class IoTDBRpcDataSet(object):
         return self.has_cached_data_frame
 
     def _has_next_result_set(self):
-        if (self.__query_data_set is not None) and (
-            len(self.__query_data_set.time) != 0
-        ):
+        if (self.__query_result is not None) and (len(self.__query_result[0]) 
!= 0):
             return True
         if self.__empty_resultSet:
             return False
@@ -262,8 +268,12 @@ class IoTDBRpcDataSet(object):
         for column_name in self.__column_name_list:
             result[column_name] = []
         while self._has_next_result_set():
+            time_column_values, column_values, null_indicators, _ = 
deserialize(
+                self.__query_result[0]
+            )
+            self.__query_result = None
             time_array = np.frombuffer(
-                self.__query_data_set.time, 
np.dtype(np.longlong).newbyteorder(">")
+                time_column_values, np.dtype(np.longlong).newbyteorder(">")
             )
             if time_array.dtype.byteorder == ">":
                 time_array = time_array.byteswap().view(
@@ -272,10 +282,9 @@ class IoTDBRpcDataSet(object):
             if self.ignore_timestamp is None or self.ignore_timestamp is False:
                 result[IoTDBRpcDataSet.TIMESTAMP_STR].append(time_array)
 
-            self.__query_data_set.time = []
             total_length = len(time_array)
 
-            for i in range(len(self.__query_data_set.bitmapList)):
+            for i in range(len(column_values)):
                 if self.ignore_timestamp is True:
                     column_name = self.__column_name_list[i]
                 else:
@@ -287,7 +296,7 @@ class IoTDBRpcDataSet(object):
                 if location < 0:
                     continue
                 data_type = self.column_type_deduplicated_list[location]
-                value_buffer = self.__query_data_set.valueList[location]
+                value_buffer = column_values[location]
                 value_buffer_len = len(value_buffer)
                 # DOUBLE
                 if data_type == 4:
@@ -359,8 +368,8 @@ class IoTDBRpcDataSet(object):
                     data_array = data_array.byteswap().view(
                         data_array.dtype.newbyteorder("<")
                     )
-                self.__query_data_set.valueList[location] = None
                 tmp_array = []
+                null_indicator = null_indicators[location]
                 if len(data_array) < total_length:
                     # BOOLEAN, INT32, INT64, TIMESTAMP
                     if (
@@ -384,12 +393,9 @@ class IoTDBRpcDataSet(object):
                     ):
                         tmp_array = np.full(total_length, None, 
dtype=data_array.dtype)
 
-                    bitmap_buffer = self.__query_data_set.bitmapList[location]
-                    buffer = _to_bitbuffer(bitmap_buffer)
-                    bit_mask = (np.frombuffer(buffer, "u1") - 
ord("0")).astype(bool)
-                    if len(bit_mask) != total_length:
-                        bit_mask = bit_mask[:total_length]
-                    tmp_array[bit_mask] = data_array
+                    if null_indicator is not None:
+                        indexes = [not v for v in null_indicator]
+                        tmp_array[indexes] = data_array
 
                     if data_type == 1:
                         tmp_array = pd.Series(tmp_array).astype("Int32")
@@ -430,11 +436,11 @@ class IoTDBRpcDataSet(object):
             self.__default_time_out,
         )
         try:
-            resp = self.__client.fetchResults(request)
+            resp = self.__client.fetchResultsV2(request)
             if not resp.hasResultSet:
                 self.__empty_resultSet = True
             else:
-                self.__query_data_set = resp.queryDataSet
+                self.__query_result = resp.queryResult
             return resp.hasResultSet
         except TTransport.TException as e:
             raise RuntimeError(
diff --git a/iotdb-client/client-py/iotdb/utils/SessionDataSet.py b/iotdb-client/client-py/iotdb/utils/SessionDataSet.py
index f4d63035efc..50dc123aa57 100644
--- a/iotdb-client/client-py/iotdb/utils/SessionDataSet.py
+++ b/iotdb-client/client-py/iotdb/utils/SessionDataSet.py
@@ -40,7 +40,7 @@ class SessionDataSet(object):
         client,
         statement_id,
         session_id,
-        query_data_set,
+        query_result,
         ignore_timestamp,
     ):
         self.iotdb_rpc_data_set = IoTDBRpcDataSet(
@@ -53,7 +53,7 @@ class SessionDataSet(object):
             client,
             statement_id,
             session_id,
-            query_data_set,
+            query_result,
             5000,
         )
         self.column_size = self.iotdb_rpc_data_set.column_size
diff --git a/iotdb-client/client-py/session_example.py b/iotdb-client/client-py/session_example.py
index ca610de9a0c..676682794ac 100644
--- a/iotdb-client/client-py/session_example.py
+++ b/iotdb-client/client-py/session_example.py
@@ -41,378 +41,15 @@ session = Session.init_from_node_urls(
 )
 session.open(False)
 
-# create and delete databases
-session.set_storage_group("root.sg_test_01")
-session.set_storage_group("root.sg_test_02")
-session.set_storage_group("root.sg_test_03")
-session.set_storage_group("root.sg_test_04")
-session.delete_storage_group("root.sg_test_02")
-session.delete_storage_groups(["root.sg_test_03", "root.sg_test_04"])
-
-# setting time series.
-session.create_time_series(
-    "root.sg_test_01.d_01.s_01", TSDataType.BOOLEAN, TSEncoding.PLAIN, 
Compressor.SNAPPY
-)
-session.create_time_series(
-    "root.sg_test_01.d_01.s_02", TSDataType.INT32, TSEncoding.PLAIN, 
Compressor.SNAPPY
-)
-session.create_time_series(
-    "root.sg_test_01.d_01.s_03", TSDataType.INT64, TSEncoding.PLAIN, 
Compressor.SNAPPY
-)
-session.create_time_series(
-    "root.sg_test_01.d_02.s_01",
-    TSDataType.BOOLEAN,
-    TSEncoding.PLAIN,
-    Compressor.SNAPPY,
-    None,
-    {"tag1": "v1"},
-    {"description": "v1"},
-    "temperature",
-)
-
-# setting multiple time series once.
-ts_path_lst_ = [
-    "root.sg_test_01.d_01.s_04",
-    "root.sg_test_01.d_01.s_05",
-    "root.sg_test_01.d_01.s_06",
-    "root.sg_test_01.d_01.s_07",
-    "root.sg_test_01.d_01.s_08",
-    "root.sg_test_01.d_01.s_09",
-]
-data_type_lst_ = [
-    TSDataType.FLOAT,
-    TSDataType.DOUBLE,
-    TSDataType.TEXT,
-    TSDataType.FLOAT,
-    TSDataType.DOUBLE,
-    TSDataType.TEXT,
-]
-encoding_lst_ = [TSEncoding.PLAIN for _ in range(len(data_type_lst_))]
-compressor_lst_ = [Compressor.SNAPPY for _ in range(len(data_type_lst_))]
-session.create_multi_time_series(
-    ts_path_lst_, data_type_lst_, encoding_lst_, compressor_lst_
-)
-
-ts_path_lst_ = [
-    "root.sg_test_01.d_02.s_04",
-    "root.sg_test_01.d_02.s_05",
-    "root.sg_test_01.d_02.s_06",
-    "root.sg_test_01.d_02.s_07",
-    "root.sg_test_01.d_02.s_08",
-    "root.sg_test_01.d_02.s_09",
-]
-data_type_lst_ = [
-    TSDataType.FLOAT,
-    TSDataType.DOUBLE,
-    TSDataType.TEXT,
-    TSDataType.FLOAT,
-    TSDataType.DOUBLE,
-    TSDataType.TEXT,
-]
-encoding_lst_ = [TSEncoding.PLAIN for _ in range(len(data_type_lst_))]
-compressor_lst_ = [Compressor.SNAPPY for _ in range(len(data_type_lst_))]
-tags_lst_ = [{"tag2": "v2"} for _ in range(len(data_type_lst_))]
-attributes_lst_ = [{"description": "v2"} for _ in range(len(data_type_lst_))]
-session.create_multi_time_series(
-    ts_path_lst_,
-    data_type_lst_,
-    encoding_lst_,
-    compressor_lst_,
-    None,
-    tags_lst_,
-    attributes_lst_,
-    None,
-)
-
-# delete time series
-session.delete_time_series(
-    [
-        "root.sg_test_01.d_01.s_07",
-        "root.sg_test_01.d_01.s_08",
-        "root.sg_test_01.d_01.s_09",
-    ]
-)
-
-# checking time series
-print(
-    "s_07 expecting False, checking result: ",
-    session.check_time_series_exists("root.sg_test_01.d_01.s_07"),
-)
-print(
-    "s_03 expecting True, checking result: ",
-    session.check_time_series_exists("root.sg_test_01.d_01.s_03"),
-)
-print(
-    "d_02.s_01 expecting True, checking result: ",
-    session.check_time_series_exists("root.sg_test_01.d_02.s_01"),
-)
-print(
-    "d_02.s_06 expecting True, checking result: ",
-    session.check_time_series_exists("root.sg_test_01.d_02.s_06"),
-)
-
-# insert one record into the database.
-measurements_ = ["s_01", "s_02", "s_03", "s_04", "s_05", "s_06"]
-values_ = [False, 10, 11, 1.1, 10011.1, "test_record"]
-data_types_ = [
-    TSDataType.BOOLEAN,
-    TSDataType.INT32,
-    TSDataType.INT64,
-    TSDataType.FLOAT,
-    TSDataType.DOUBLE,
-    TSDataType.TEXT,
-]
-session.insert_record("root.sg_test_01.d_01", 1, measurements_, data_types_, 
values_)
-
-# insert multiple records into database
-measurements_list_ = [
-    ["s_01", "s_02", "s_03", "s_04", "s_05", "s_06"],
-    ["s_01", "s_02", "s_03", "s_04", "s_05", "s_06"],
-]
-values_list_ = [
-    [False, 22, 33, 4.4, 55.1, "test_records01"],
-    [True, 77, 88, 1.25, 8.125, bytes("test_records02", "utf-8")],
-]
-data_type_list_ = [data_types_, data_types_]
-device_ids_ = ["root.sg_test_01.d_01", "root.sg_test_01.d_01"]
-session.insert_records(
-    device_ids_, [2, 3], measurements_list_, data_type_list_, values_list_
-)
-
-# insert one tablet into the database.
-values_ = [
-    [False, 10, 11, 1.1, 10011.1, "test01"],
-    [True, 100, 11111, 1.25, 101.0, "test02"],
-    [False, 100, 1, 188.1, 688.25, "test03"],
-    [True, 0, 0, 0, 6.25, "test04"],
-]  # Non-ASCII text will cause error since bytes can only hold 0-128 nums.
-timestamps_ = [4, 5, 6, 7]
-tablet_ = Tablet(
-    "root.sg_test_01.d_01", measurements_, data_types_, values_, timestamps_
-)
-session.insert_tablet(tablet_)
-
-# insert one numpy tablet into the database.
-np_values_ = [
-    np.array([False, True, False, True], TSDataType.BOOLEAN.np_dtype()),
-    np.array([10, 100, 100, 0], TSDataType.INT32.np_dtype()),
-    np.array([11, 11111, 1, 0], TSDataType.INT64.np_dtype()),
-    np.array([1.1, 1.25, 188.1, 0], TSDataType.FLOAT.np_dtype()),
-    np.array([10011.1, 101.0, 688.25, 6.25], TSDataType.DOUBLE.np_dtype()),
-    np.array(["test01", "test02", "test03", "test04"], 
TSDataType.TEXT.np_dtype()),
-]
-np_timestamps_ = np.array([1, 2, 3, 4], TSDataType.INT64.np_dtype())
-np_tablet_ = NumpyTablet(
-    "root.sg_test_01.d_02", measurements_, data_types_, np_values_, 
np_timestamps_
-)
-session.insert_tablet(np_tablet_)
-
-# insert one unsorted numpy tablet into the database.
-np_values_unsorted = [
-    np.array([False, False, False, True, True], np.dtype(">?")),
-    np.array([0, 10, 100, 1000, 10000], np.dtype(">i4")),
-    np.array([1, 11, 111, 1111, 11111], np.dtype(">i8")),
-    np.array([1.1, 1.25, 188.1, 0, 8.999], np.dtype(">f4")),
-    np.array([10011.1, 101.0, 688.25, 6.25, 8, 776], np.dtype(">f8")),
-    np.array(["test09", "test08", "test07", "test06", "test05"]),
-]
-np_timestamps_unsorted = np.array([9, 8, 7, 6, 5], np.dtype(">i8"))
-np_tablet_unsorted = NumpyTablet(
-    "root.sg_test_01.d_02",
-    measurements_,
-    data_types_,
-    np_values_unsorted,
-    np_timestamps_unsorted,
-)
-
-# insert one numpy tablet into the database.
-np_values_ = [
-    np.array([False, True, False, True], TSDataType.BOOLEAN.np_dtype()),
-    np.array([10, 100, 100, 0], TSDataType.INT32.np_dtype()),
-    np.array([11, 11111, 1, 0], TSDataType.INT64.np_dtype()),
-    np.array([1.1, 1.25, 188.1, 0], TSDataType.FLOAT.np_dtype()),
-    np.array([10011.1, 101.0, 688.25, 6.25], TSDataType.DOUBLE.np_dtype()),
-    np.array(["test01", "test02", "test03", "test04"]),
-]
-np_timestamps_ = np.array([98, 99, 100, 101], TSDataType.INT64.np_dtype())
-np_bitmaps_ = []
-for i in range(len(measurements_)):
-    np_bitmaps_.append(BitMap(len(np_timestamps_)))
-np_bitmaps_[0].mark(0)
-np_bitmaps_[1].mark(1)
-np_bitmaps_[2].mark(2)
-np_bitmaps_[4].mark(3)
-np_bitmaps_[5].mark(3)
-np_tablet_with_none = NumpyTablet(
-    "root.sg_test_01.d_02",
-    measurements_,
-    data_types_,
-    np_values_,
-    np_timestamps_,
-    np_bitmaps_,
-)
-session.insert_tablet(np_tablet_with_none)
-
-
-session.insert_tablet(np_tablet_unsorted)
-print(np_tablet_unsorted.get_timestamps())
-for value in np_tablet_unsorted.get_values():
-    print(value)
-
-# insert multiple tablets into database
-tablet_01 = Tablet(
-    "root.sg_test_01.d_01", measurements_, data_types_, values_, [8, 9, 10, 11]
-)
-tablet_02 = Tablet(
-    "root.sg_test_01.d_01", measurements_, data_types_, values_, [12, 13, 14, 
15]
-)
-session.insert_tablets([tablet_01, tablet_02])
-
-# insert one tablet with empty cells into the database.
-values_ = [
-    [None, 10, 11, 1.1, 10011.1, "test01"],
-    [True, None, 11111, 1.25, 101.0, "test02"],
-    [False, 100, 1, None, 688.25, "test03"],
-    [True, 0, 0, 0, 6.25, None],
-]  # Non-ASCII text will cause error since bytes can only hold 0-128 nums.
-timestamps_ = [16, 17, 18, 19]
-tablet_ = Tablet(
-    "root.sg_test_01.d_01", measurements_, data_types_, values_, timestamps_
-)
-session.insert_tablet(tablet_)
-
-# insert records of one device
-time_list = [1, 2, 3]
-measurements_list = [
-    ["s_01", "s_02", "s_03"],
-    ["s_01", "s_02", "s_03"],
-    ["s_01", "s_02", "s_03"],
-]
-data_types_list = [
-    [TSDataType.BOOLEAN, TSDataType.INT32, TSDataType.INT64],
-    [TSDataType.BOOLEAN, TSDataType.INT32, TSDataType.INT64],
-    [TSDataType.BOOLEAN, TSDataType.INT32, TSDataType.INT64],
-]
-values_list = [[False, 22, 33], [True, 1, 23], [False, 15, 26]]
-
-session.insert_records_of_one_device(
-    "root.sg_test_01.d_01", time_list, measurements_list, data_types_list, 
values_list
-)
-
-# execute non-query sql statement
-session.execute_non_query_statement(
-    "insert into root.sg_test_01.d_01(timestamp, s_02) values(16, 188)"
-)
-
-# execute sql query statement
-with session.execute_query_statement(
-    "select * from root.sg_test_01.d_01"
-) as session_data_set:
-    session_data_set.set_fetch_size(1024)
-    while session_data_set.has_next():
-        print(session_data_set.next())
-# execute sql query statement
-with session.execute_query_statement(
-    "select s_01, s_02, s_03, s_04, s_05, s_06 from root.sg_test_01.d_02"
-) as session_data_set:
-    session_data_set.set_fetch_size(1024)
-    while session_data_set.has_next():
-        print(session_data_set.next())
-
-# execute statement
-with session.execute_statement(
-    "select * from root.sg_test_01.d_01"
-) as session_data_set:
-    while session_data_set.has_next():
-        print(session_data_set.next())
-
-session.execute_statement(
-    "insert into root.sg_test_01.d_01(timestamp, s_02) values(16, 188)"
-)
-
-# insert string records of one device
-time_list = [1, 2, 3]
-measurements_list = [
-    ["s_01", "s_02", "s_03"],
-    ["s_01", "s_02", "s_03"],
-    ["s_01", "s_02", "s_03"],
-]
-values_list = [["False", "22", "33"], ["True", "1", "23"], ["False", "15", 
"26"]]
-
-session.insert_string_records_of_one_device(
-    "root.sg_test_01.d_03",
-    time_list,
-    measurements_list,
-    values_list,
-)
-
-with session.execute_raw_data_query(
-    ["root.sg_test_01.d_03.s_01", "root.sg_test_01.d_03.s_02"], 1, 4
-) as session_data_set:
-    session_data_set.set_fetch_size(1024)
-    while session_data_set.has_next():
-        print(session_data_set.next())
-
-with session.execute_last_data_query(
-    ["root.sg_test_01.d_03.s_01", "root.sg_test_01.d_03.s_02"], 0
-) as session_data_set:
-    session_data_set.set_fetch_size(1024)
-    while session_data_set.has_next():
-        print(session_data_set.next())
-
-# insert tablet with new data types
-measurements_new_type = ["s_01", "s_02", "s_03", "s_04"]
-data_types_new_type = [
-    TSDataType.DATE,
-    TSDataType.TIMESTAMP,
-    TSDataType.BLOB,
-    TSDataType.STRING,
-]
-values_new_type = [
-    [date(2024, 1, 1), 1, b"\x12\x34", "test01"],
-    [date(2024, 1, 2), 2, b"\x12\x34", "test02"],
-    [date(2024, 1, 3), 3, b"\x12\x34", "test03"],
-    [date(2024, 1, 4), 4, b"\x12\x34", "test04"],
-]
-timestamps_new_type = [1, 2, 3, 4]
-tablet_new_type = Tablet(
-    "root.sg_test_01.d_04",
-    measurements_new_type,
-    data_types_new_type,
-    values_new_type,
-    timestamps_new_type,
-)
-session.insert_tablet(tablet_new_type)
-np_values_new_type = [
-    np.array([date(2024, 2, 4), date(2024, 3, 4), date(2024, 4, 4), date(2024, 
5, 4)]),
-    np.array([5, 6, 7, 8], TSDataType.INT64.np_dtype()),
-    np.array([b"\x12\x34", b"\x12\x34", b"\x12\x34", b"\x12\x34"]),
-    np.array(["test01", "test02", "test03", "test04"]),
-]
-np_timestamps_new_type = np.array([5, 6, 7, 8], TSDataType.INT64.np_dtype())
-np_tablet_new_type = NumpyTablet(
-    "root.sg_test_01.d_04",
-    measurements_new_type,
-    data_types_new_type,
-    np_values_new_type,
-    np_timestamps_new_type,
-)
-session.insert_tablet(np_tablet_new_type)
-with session.execute_query_statement(
-    "select s_01,s_02,s_03,s_04 from root.sg_test_01.d_04"
-) as dataset:
+with session.execute_query_statement("select * from root.**") as dataset:
     print(dataset.get_column_names())
     while dataset.has_next():
         print(dataset.next())
 
-with session.execute_query_statement(
-    "select s_01,s_02,s_03,s_04 from root.sg_test_01.d_04"
-) as dataset:
+with session.execute_query_statement("select * from root.**") as dataset:
     df = dataset.todf()
     print(df.to_string())
 
-# delete database
-session.delete_storage_group("root.sg_test_01")
 
 # close session connection.
 session.close()

Reply via email to