This is an automated email from the ASF dual-hosted git repository.

changchen pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/incubator-gluten.git


The following commit(s) were added to refs/heads/main by this push:
     new d444a16d10 [GLUTEN-1632][CH]Daily Update Clickhouse Version (20250605) 
(#9870)
d444a16d10 is described below

commit d444a16d10087d3618ad137108c888f17f93edc4
Author: Kyligence Git <[email protected]>
AuthorDate: Thu Jun 5 02:42:52 2025 -0500

    [GLUTEN-1632][CH]Daily Update Clickhouse Version (20250605) (#9870)
    
    * [GLUTEN-1632][CH]Daily Update Clickhouse Version (20250605)
    
    * Fix Build due to https://github.com/ClickHouse/ClickHouse/pull/79762
    
    * "GLUTEN-9681: test kafka data consistency" fails on arm with spark 
3.2. We don't support the kafka source in spark 3.2, so let's ignore it.
    
    ---------
    
    Co-authored-by: kyligence-git <[email protected]>
    Co-authored-by: Chang chen <[email protected]>
---
 .../gluten/execution/kafka/ClickhouseGlutenKafkaScanSuite.scala    | 2 +-
 cpp-ch/clickhouse.version                                          | 4 ++--
 cpp-ch/local-engine/Functions/SparkFunctionSplitByRegexp.cpp       | 7 ++++---
 cpp-ch/local-engine/Functions/SparkFunctionStrToMap.cpp            | 6 ++++--
 4 files changed, 11 insertions(+), 8 deletions(-)

diff --git 
a/backends-clickhouse/src-kafka/test/scala/org/apache/gluten/execution/kafka/ClickhouseGlutenKafkaScanSuite.scala
 
b/backends-clickhouse/src-kafka/test/scala/org/apache/gluten/execution/kafka/ClickhouseGlutenKafkaScanSuite.scala
index ddc8e56395..67b85e274e 100644
--- 
a/backends-clickhouse/src-kafka/test/scala/org/apache/gluten/execution/kafka/ClickhouseGlutenKafkaScanSuite.scala
+++ 
b/backends-clickhouse/src-kafka/test/scala/org/apache/gluten/execution/kafka/ClickhouseGlutenKafkaScanSuite.scala
@@ -103,7 +103,7 @@ class ClickhouseGlutenKafkaScanSuite
     }
   }
 
-  test("GLUTEN-9681: test kafka data consistency") {
+  testWithSpecifiedSparkVersion("GLUTEN-9681: test kafka data consistency", 
"3.3", "3.5") {
     withTempDir(
       dir => {
         val table_name = "data_consistency"
diff --git a/cpp-ch/clickhouse.version b/cpp-ch/clickhouse.version
index 13bc986ba6..57c19de670 100644
--- a/cpp-ch/clickhouse.version
+++ b/cpp-ch/clickhouse.version
@@ -1,3 +1,3 @@
 CH_ORG=Kyligence
-CH_BRANCH=rebase_ch/20250604
-CH_COMMIT=549ef373744
+CH_BRANCH=rebase_ch/20250605
+CH_COMMIT=fadcc98ef76
diff --git a/cpp-ch/local-engine/Functions/SparkFunctionSplitByRegexp.cpp 
b/cpp-ch/local-engine/Functions/SparkFunctionSplitByRegexp.cpp
index 1868c40c0f..0fceed2ef5 100644
--- a/cpp-ch/local-engine/Functions/SparkFunctionSplitByRegexp.cpp
+++ b/cpp-ch/local-engine/Functions/SparkFunctionSplitByRegexp.cpp
@@ -14,7 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-
+#include <ranges>
 #include <Columns/ColumnConst.h>
 #include <DataTypes/IDataType.h>
 #include <Functions/FunctionFactory.h>
@@ -22,7 +22,6 @@
 #include <Functions/FunctionTokens.h>
 #include <Functions/IFunctionAdaptors.h>
 #include <Functions/Regexps.h>
-#include <base/map.h>
 #include <Common/assert_cast.h>
 
 
@@ -195,7 +194,9 @@ public:
         if (patternIsTrivialChar(arguments))
             return FunctionFactory::instance().getImpl("splitByChar", 
context)->build(arguments);
         return std::make_unique<FunctionToFunctionBaseAdaptor>(
-            split_by_regexp, collections::map<DataTypes>(arguments, [](const 
auto & elem) { return elem.type; }), return_type);
+            split_by_regexp,
+            DataTypes{std::from_range_t{}, arguments | 
std::views::transform([](const auto & elem) { return elem.type; })},
+            return_type);
     }
 
     DataTypePtr getReturnTypeImpl(const ColumnsWithTypeAndName & arguments) 
const override
diff --git a/cpp-ch/local-engine/Functions/SparkFunctionStrToMap.cpp 
b/cpp-ch/local-engine/Functions/SparkFunctionStrToMap.cpp
index 0c4d4ea347..19955c4dab 100644
--- a/cpp-ch/local-engine/Functions/SparkFunctionStrToMap.cpp
+++ b/cpp-ch/local-engine/Functions/SparkFunctionStrToMap.cpp
@@ -15,6 +15,7 @@
  * limitations under the License.
  */
 #include <memory>
+#include <ranges>
 #include <type_traits>
 #include <Columns/ColumnConst.h>
 #include <Columns/ColumnNullable.h>
@@ -32,7 +33,6 @@
 #include <Functions/IFunction.h>
 #include <Functions/IFunctionAdaptors.h>
 #include <Functions/Regexps.h>
-#include <base/map.h>
 #include <Common/Exception.h>
 #include <Common/OptimizedRegularExpression.h>
 
@@ -349,7 +349,9 @@ public:
         else
             function_ptr = SparkFunctionStrToMap<RegularSplitter, 
RegularSplitter>::create(context);
         return std::make_unique<DB::FunctionToFunctionBaseAdaptor>(
-            function_ptr, collections::map<DB::DataTypes>(arguments, [](const 
auto & elem) { return elem.type; }), return_type);
+            function_ptr,
+            DB::DataTypes{std::from_range_t{}, arguments | 
std::views::transform([](const auto & elem) { return elem.type; })},
+            return_type);
     }
 
     DB::DataTypePtr getReturnTypeImpl(const DB::ColumnsWithTypeAndName & 
arguments) const override


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to