This is an automated email from the ASF dual-hosted git repository.
morningman pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/master by this push:
new 794a551b0f [Enhancement][fix](profile)() modify some profiles (#14074)
794a551b0f is described below
commit 794a551b0f4de731af2843bdc252882130349cf3
Author: Pxl <[email protected]>
AuthorDate: Wed Nov 9 21:59:28 2022 +0800
[Enhancement][fix](profile)() modify some profiles (#14074)
1. add RemainedDownPredicates
2. fix core dump when _scan_ranges is empty
3. fix invalid memory access on vLiteral's debug_string()
4. enlarge mv test wait time
---
be/src/vec/exec/scan/new_olap_scan_node.cpp | 9 +++++++++
be/src/vec/exprs/vliteral.cpp | 2 ++
regression-test/suites/rollup/test_materialized_view_date.groovy | 8 ++++----
3 files changed, 15 insertions(+), 4 deletions(-)
diff --git a/be/src/vec/exec/scan/new_olap_scan_node.cpp b/be/src/vec/exec/scan/new_olap_scan_node.cpp
index a1a52a72ff..1520c834f4 100644
--- a/be/src/vec/exec/scan/new_olap_scan_node.cpp
+++ b/be/src/vec/exec/scan/new_olap_scan_node.cpp
@@ -134,6 +134,9 @@ static std::string olap_filters_to_string(const std::vector<doris::TCondition>&
static std::string tablets_id_to_string(
const std::vector<std::unique_ptr<TPaloScanRange>>& scan_ranges) {
+ if (scan_ranges.empty()) {
+ return "[empty]";
+ }
std::stringstream ss;
ss << "[" << scan_ranges[0]->tablet_id;
for (int i = 1; i < scan_ranges.size(); ++i) {
@@ -279,6 +282,12 @@ Status NewOlapScanNode::_init_scanners(std::list<VScanner*>* scanners) {
_eos = true;
return Status::OK();
}
+
+ if (_vconjunct_ctx_ptr && (*_vconjunct_ctx_ptr)->root()) {
+        _runtime_profile->add_info_string("RemainedDownPredicates",
+                                          (*_vconjunct_ctx_ptr)->root()->debug_string());
+ }
+
auto span = opentelemetry::trace::Tracer::GetCurrentSpan();
// ranges constructed from scan keys
diff --git a/be/src/vec/exprs/vliteral.cpp b/be/src/vec/exprs/vliteral.cpp
index 680023010f..4f9ad2e7a2 100644
--- a/be/src/vec/exprs/vliteral.cpp
+++ b/be/src/vec/exprs/vliteral.cpp
@@ -205,7 +205,9 @@ std::string VLiteral::debug_string() const {
switch (_type.type) {
case TYPE_BOOLEAN:
case TYPE_TINYINT:
+ out << *(reinterpret_cast<const int8_t*>(ref.data));
case TYPE_SMALLINT:
+ out << *(reinterpret_cast<const int16_t*>(ref.data));
case TYPE_INT: {
out << *(reinterpret_cast<const int32_t*>(ref.data));
break;
diff --git a/regression-test/suites/rollup/test_materialized_view_date.groovy b/regression-test/suites/rollup/test_materialized_view_date.groovy
index e927e3c87c..ce69400f6c 100644
--- a/regression-test/suites/rollup/test_materialized_view_date.groovy
+++ b/regression-test/suites/rollup/test_materialized_view_date.groovy
@@ -37,7 +37,7 @@ suite("test_materialized_view_date", "rollup") {
DISTRIBUTED BY HASH(record_id) properties("replication_num" = "1");
"""
- int max_try_secs = 60
+ int max_try_secs = 120
sql "CREATE materialized VIEW amt_max1 AS SELECT store_id, max(sale_date1) FROM ${tbName1} GROUP BY store_id;"
while (max_try_secs--) {
String res = getJobState(tbName1)
@@ -52,7 +52,7 @@ suite("test_materialized_view_date", "rollup") {
}
}
Thread.sleep(2)
- max_try_secs = 60
+ max_try_secs = 120
sql "CREATE materialized VIEW amt_max2 AS SELECT store_id, max(sale_datetime1) FROM ${tbName1} GROUP BY store_id;"
while (max_try_secs--) {
String res = getJobState(tbName1)
@@ -67,7 +67,7 @@ suite("test_materialized_view_date", "rollup") {
}
}
Thread.sleep(2)
- max_try_secs = 60
+ max_try_secs = 120
sql "CREATE materialized VIEW amt_max3 AS SELECT store_id, max(sale_datetime2) FROM ${tbName1} GROUP BY store_id;"
while (max_try_secs--) {
String res = getJobState(tbName1)
@@ -82,7 +82,7 @@ suite("test_materialized_view_date", "rollup") {
}
}
Thread.sleep(2)
- max_try_secs = 60
+ max_try_secs = 120
sql "CREATE materialized VIEW amt_max4 AS SELECT store_id, max(sale_datetime3) FROM ${tbName1} GROUP BY store_id;"
while (max_try_secs--) {
String res = getJobState(tbName1)
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]