This is an automated email from the ASF dual-hosted git repository.

changchen pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/incubator-gluten.git


The following commit(s) were added to refs/heads/main by this push:
     new 4a674e5e8 [GLUTEN-1632][CH]Daily Update Clickhouse Version (20240705) 
(#6338)
4a674e5e8 is described below

commit 4a674e5e8ab757b7699f8bc75377e67fe793ed17
Author: Kyligence Git <[email protected]>
AuthorDate: Fri Jul 5 02:22:52 2024 -0500

    [GLUTEN-1632][CH]Daily Update Clickhouse Version (20240705) (#6338)
    
    * [GLUTEN-1632][CH]Daily Update Clickhouse Version (20240705)
    
    * Fix build due to https://github.com/ClickHouse/ClickHouse/pull/61601
    
    ---------
    
    Co-authored-by: kyligence-git <[email protected]>
    Co-authored-by: Chang Chen <[email protected]>
---
 cpp-ch/clickhouse.version                                       | 4 ++--
 cpp-ch/local-engine/Operator/DefaultHashAggregateResult.cpp     | 6 +++---
 cpp-ch/local-engine/Storages/Mergetree/SparkMergeTreeWriter.cpp | 7 +++----
 cpp-ch/local-engine/Storages/Mergetree/SparkMergeTreeWriter.h   | 2 +-
 cpp-ch/local-engine/Storages/SourceFromJavaIter.cpp             | 4 ++--
 cpp-ch/local-engine/tests/gtest_parser.cpp                      | 3 ++-
 6 files changed, 13 insertions(+), 13 deletions(-)

diff --git a/cpp-ch/clickhouse.version b/cpp-ch/clickhouse.version
index 0fb13497d..92bf886e9 100644
--- a/cpp-ch/clickhouse.version
+++ b/cpp-ch/clickhouse.version
@@ -1,4 +1,4 @@
 CH_ORG=Kyligence
-CH_BRANCH=rebase_ch/20240704
-CH_COMMIT=f617655ccea
+CH_BRANCH=rebase_ch/20240705
+CH_COMMIT=531a87ed802
 
diff --git a/cpp-ch/local-engine/Operator/DefaultHashAggregateResult.cpp 
b/cpp-ch/local-engine/Operator/DefaultHashAggregateResult.cpp
index 35f891581..fbad02fda 100644
--- a/cpp-ch/local-engine/Operator/DefaultHashAggregateResult.cpp
+++ b/cpp-ch/local-engine/Operator/DefaultHashAggregateResult.cpp
@@ -116,7 +116,7 @@ public:
             has_input = true;
             output_chunk = DB::Chunk(result_cols, 1);
             auto info = std::make_shared<DB::AggregatedChunkInfo>();
-            output_chunk.setChunkInfo(info);
+            output_chunk.getChunkInfos().add(std::move(info));
             return Status::Ready;
         }
 
@@ -124,10 +124,10 @@ public:
         if (input.hasData())
         {
             output_chunk = input.pull(true);
-            if (!output_chunk.hasChunkInfo())
+            if (output_chunk.getChunkInfos().empty())
             {
                 auto info = std::make_shared<DB::AggregatedChunkInfo>();
-                output_chunk.setChunkInfo(info);
+                output_chunk.getChunkInfos().add(std::move(info));
             }
             has_input = true;
             return Status::Ready;
diff --git a/cpp-ch/local-engine/Storages/Mergetree/SparkMergeTreeWriter.cpp 
b/cpp-ch/local-engine/Storages/Mergetree/SparkMergeTreeWriter.cpp
index 406f2aaa2..2f673fc38 100644
--- a/cpp-ch/local-engine/Storages/Mergetree/SparkMergeTreeWriter.cpp
+++ b/cpp-ch/local-engine/Storages/Mergetree/SparkMergeTreeWriter.cpp
@@ -121,12 +121,11 @@ void SparkMergeTreeWriter::write(const DB::Block & block)
         checkAndMerge();
 }
 
-bool SparkMergeTreeWriter::chunkToPart(Chunk && chunk)
+bool SparkMergeTreeWriter::chunkToPart(Chunk && plan_chunk)
 {
-    if (chunk.hasChunkInfo())
+    if (Chunk result_chunk = DB::Squashing::squash(std::move(plan_chunk)))
     {
-        Chunk squash_chunk = DB::Squashing::squash(std::move(chunk));
-        Block result = header.cloneWithColumns(squash_chunk.getColumns());
+        auto result = 
squashing->getHeader().cloneWithColumns(result_chunk.detachColumns());
         return blockToPart(result);
     }
     return false;
diff --git a/cpp-ch/local-engine/Storages/Mergetree/SparkMergeTreeWriter.h 
b/cpp-ch/local-engine/Storages/Mergetree/SparkMergeTreeWriter.h
index 13ac22394..269b0352c 100644
--- a/cpp-ch/local-engine/Storages/Mergetree/SparkMergeTreeWriter.h
+++ b/cpp-ch/local-engine/Storages/Mergetree/SparkMergeTreeWriter.h
@@ -77,7 +77,7 @@ private:
     void saveMetadata();
     void commitPartToRemoteStorageIfNeeded();
     void finalizeMerge();
-    bool chunkToPart(Chunk && chunk);
+    bool chunkToPart(Chunk && plan_chunk);
     bool blockToPart(Block & block);
     bool useLocalStorage() const;
 
diff --git a/cpp-ch/local-engine/Storages/SourceFromJavaIter.cpp 
b/cpp-ch/local-engine/Storages/SourceFromJavaIter.cpp
index 37501e985..1c5902c8c 100644
--- a/cpp-ch/local-engine/Storages/SourceFromJavaIter.cpp
+++ b/cpp-ch/local-engine/Storages/SourceFromJavaIter.cpp
@@ -109,13 +109,13 @@ DB::Chunk SourceFromJavaIter::generate()
             auto info = std::make_shared<DB::AggregatedChunkInfo>();
             info->is_overflows = data->info.is_overflows;
             info->bucket_num = data->info.bucket_num;
-            result.setChunkInfo(info);
+            result.getChunkInfos().add(std::move(info));
         }
         else
         {
             result = BlockUtil::buildRowCountChunk(rows);
             auto info = std::make_shared<DB::AggregatedChunkInfo>();
-            result.setChunkInfo(info);
+            result.getChunkInfos().add(std::move(info));
         }
     }
     return result;
diff --git a/cpp-ch/local-engine/tests/gtest_parser.cpp 
b/cpp-ch/local-engine/tests/gtest_parser.cpp
index 24c796358..34b3a8875 100644
--- a/cpp-ch/local-engine/tests/gtest_parser.cpp
+++ b/cpp-ch/local-engine/tests/gtest_parser.cpp
@@ -101,7 +101,8 @@ TEST(LocalExecutor, StorageObjectStorageSink)
 
     /// 2. Create Chunk
    /// 3. consume
-    sink.consume(testChunk());
+    Chunk data = testChunk();
+    sink.consume(data);
     sink.onFinish();
 }
 


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to