This is an automated email from the ASF dual-hosted git repository.
yiguolei pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/master by this push:
new dc88e558a2e [bug](function) fix array_map coredump when no rows
(#50913)
dc88e558a2e is described below
commit dc88e558a2e1d03fb35d9d127e09de6ae233d17c
Author: zhangstar333 <[email protected]>
AuthorDate: Sun May 18 09:27:55 2025 +0800
[bug](function) fix array_map coredump when no rows (#50913)
### What problem does this PR solve?
Problem Summary:
when the block is empty, it should also execute once to insert an empty
result column.
---
.../exprs/lambda_function/varray_map_function.cpp | 6 ++---
.../array_functions/test_array_map_function.out | Bin 2417 -> 2522 bytes
.../array_functions/test_array_map_function.groovy | 25 +++++++++++++++++++++
3 files changed, 28 insertions(+), 3 deletions(-)
diff --git a/be/src/vec/exprs/lambda_function/varray_map_function.cpp
b/be/src/vec/exprs/lambda_function/varray_map_function.cpp
index ab36a36017e..e292f2c3cde 100644
--- a/be/src/vec/exprs/lambda_function/varray_map_function.cpp
+++ b/be/src/vec/exprs/lambda_function/varray_map_function.cpp
@@ -199,7 +199,7 @@ public:
Block lambda_block;
auto column_size = names.size();
MutableColumns columns(column_size);
- while (args_info.current_row_idx < block->rows()) {
+ do {
bool mem_reuse = lambda_block.mem_reuse();
for (int i = 0; i < column_size; i++) {
if (mem_reuse) {
@@ -222,7 +222,7 @@ public:
long current_step = std::min(
max_step, (long)(args_info.cur_size -
args_info.current_offset_in_array));
size_t pos = args_info.array_start +
args_info.current_offset_in_array;
- for (int i = 0; i < arguments.size(); ++i) {
+ for (int i = 0; i < arguments.size() && current_step > 0; ++i)
{
columns[gap + i]->insert_range_from(*lambda_datas[i], pos,
current_step);
}
args_info.current_offset_in_array += current_step;
@@ -265,7 +265,7 @@ public:
}
result_col->insert_range_from(*res_col, 0, res_col->size());
lambda_block.clear_column_data(column_size);
- }
+ } while (args_info.current_row_idx < block->rows());
//4. get the result column after execution, reassemble it into a new
array column, and return.
ColumnWithTypeAndName result_arr;
diff --git
a/regression-test/data/query_p0/sql_functions/array_functions/test_array_map_function.out
b/regression-test/data/query_p0/sql_functions/array_functions/test_array_map_function.out
index 39649ca512e..211092f3875 100644
Binary files
a/regression-test/data/query_p0/sql_functions/array_functions/test_array_map_function.out
and
b/regression-test/data/query_p0/sql_functions/array_functions/test_array_map_function.out
differ
diff --git
a/regression-test/suites/query_p0/sql_functions/array_functions/test_array_map_function.groovy
b/regression-test/suites/query_p0/sql_functions/array_functions/test_array_map_function.groovy
index 17de93942bf..a9ed82f3818 100644
---
a/regression-test/suites/query_p0/sql_functions/array_functions/test_array_map_function.groovy
+++
b/regression-test/suites/query_p0/sql_functions/array_functions/test_array_map_function.groovy
@@ -103,4 +103,29 @@ suite("test_array_map_function") {
}
sql "DROP TABLE IF EXISTS ${tableName}"
+
+ sql """ CREATE TABLE IF NOT EXISTS array_map_test (
+ id INT,
+ int_array ARRAY<INT>,
+ string_array ARRAY<STRING>,
+ double_array ARRAY<DOUBLE>,
+ nested_array ARRAY<ARRAY<INT>>,
+ nullable_array ARRAY<INT> NULL
+ ) ENGINE=OLAP
+ DUPLICATE KEY(id)
+ DISTRIBUTED BY HASH(id) BUCKETS 10
+ PROPERTIES (
+ "replication_num" = "1"
+ );
+ """
+ sql """ INSERT INTO array_map_test VALUES
+ (1, [1,2,3], ['a','b','c'], [1.1,2.2,3.3], [[1,2],[3,4]], NULL),
+ (2, [10,20], ['x','y'], [10.5,20.5], [[5,6],[7,8]], [1,2,3]),
+ (3, [], [], [], [], []),
+ (4, [100,200,300], ['one','two','three'], [100.1,200.2,300.3],
[[9,10],[11,12]], [4,5,6]),
+ (5, [5], ['single'], [5.5], [[13]], [7]);
+ """
+ qt_select_25 """
+ SELECT id, array_map(x -> array_map(y -> y * 10, x), nested_array)
FROM array_map_test order by id;
+ """
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]