This is an automated email from the ASF dual-hosted git repository.
gabriellee pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/master by this push:
new a536030979 [FOLLOWUP](load) fix nullable and add regression (#12375)
a536030979 is described below
commit a53603097998f4b17fc819eea54f79c37af26950
Author: Gabriel <[email protected]>
AuthorDate: Thu Sep 8 00:05:04 2022 +0800
[FOLLOWUP](load) fix nullable and add regression (#12375)
* [FOLLOWUP](load) fix nullable and add regression
---
.licenserc.yaml | 1 +
.../doris/load/loadv2/LoadingTaskPlanner.java | 23 +++++++-
.../stream_load/load_nullable_to_not_nullable.out | 4 ++
.../suites/load_p0/stream_load/data/test_time.data | 1 +
.../load_nullable_to_not_nullable.groovy | 63 ++++++++++++++++++++++
5 files changed, 91 insertions(+), 1 deletion(-)
diff --git a/.licenserc.yaml b/.licenserc.yaml
index 595deb665f..a99ab7eeb8 100644
--- a/.licenserc.yaml
+++ b/.licenserc.yaml
@@ -62,5 +62,6 @@ header:
- "docs/.vuepress/public/js/wow.min.js"
- "docs/package-lock.json"
- "regression-test/script/README"
+ - "regression-test/suites/load_p0/stream_load/data"
comment: on-failure
diff --git a/fe/fe-core/src/main/java/org/apache/doris/load/loadv2/LoadingTaskPlanner.java b/fe/fe-core/src/main/java/org/apache/doris/load/loadv2/LoadingTaskPlanner.java
index f78476f583..3796040f21 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/load/loadv2/LoadingTaskPlanner.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/load/loadv2/LoadingTaskPlanner.java
@@ -20,6 +20,7 @@ package org.apache.doris.load.loadv2;
import org.apache.doris.analysis.Analyzer;
import org.apache.doris.analysis.BrokerDesc;
import org.apache.doris.analysis.DescriptorTable;
+import org.apache.doris.analysis.ImportColumnDesc;
import org.apache.doris.analysis.SlotDescriptor;
import org.apache.doris.analysis.TupleDescriptor;
import org.apache.doris.analysis.UserIdentity;
@@ -108,17 +109,37 @@ public class LoadingTaskPlanner {
throws UserException {
// Generate tuple descriptor
TupleDescriptor destTupleDesc = descTable.createTupleDescriptor();
+ TupleDescriptor scanTupleDesc = descTable.createTupleDescriptor("ScanTuple");
// use full schema to fill the descriptor table
for (Column col : table.getFullSchema()) {
SlotDescriptor slotDesc = descTable.addSlotDescriptor(destTupleDesc);
slotDesc.setIsMaterialized(true);
slotDesc.setColumn(col);
slotDesc.setIsNullable(col.isAllowNull());
+
+ SlotDescriptor scanSlotDesc = descTable.addSlotDescriptor(scanTupleDesc);
+ scanSlotDesc.setIsMaterialized(true);
+ scanSlotDesc.setColumn(col);
+ scanSlotDesc.setIsNullable(col.isAllowNull());
+ if (fileGroups.size() > 0) {
+ for (ImportColumnDesc importColumnDesc : fileGroups.get(0).getColumnExprList()) {
+ try {
+ if (!importColumnDesc.isColumn() && importColumnDesc.getColumnName() != null
+ && importColumnDesc.getColumnName().equals(col.getName())) {
+ scanSlotDesc.setIsNullable(importColumnDesc.getExpr().isNullable());
+ break;
+ }
+ } catch (Exception e) {
+ // An exception may be thrown here because the `importColumnDesc.getExpr()` is not analyzed
+ // now. We just skip this case here.
+ }
+ }
+ }
}
// Generate plan trees
// 1. Broker scan node
- BrokerScanNode scanNode = new BrokerScanNode(new PlanNodeId(nextNodeId++), destTupleDesc, "BrokerScanNode",
+ BrokerScanNode scanNode = new BrokerScanNode(new PlanNodeId(nextNodeId++), scanTupleDesc, "BrokerScanNode",
fileStatusesList, filesAdded);
scanNode.setLoadInfo(loadJobId, txnId, table, brokerDesc, fileGroups,
strictMode, loadParallelism, userInfo);
scanNode.init(analyzer);
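In plain terms, the hunk above plans the BrokerScanNode against a separate scan tuple whose slot nullability follows the column-mapping expression when one exists for that column, falling back to the destination column's own nullability otherwise. A minimal standalone sketch of that rule, using hypothetical stand-in types (ColumnMapping, TargetColumn) rather than Doris' ImportColumnDesc and Column classes:

import java.util.List;
import java.util.Objects;

// Hypothetical sketch only; the names below are stand-ins, not Doris APIs.
final class ScanSlotNullabilitySketch {
    // Stand-in for an ImportColumnDesc: target column name, whether the mapping is a
    // plain column reference, and whether its expression may produce NULL.
    record ColumnMapping(String targetColumn, boolean isPlainColumn, boolean exprNullable) {}

    // Stand-in for a destination Column.
    record TargetColumn(String name, boolean allowNull) {}

    // A scan slot keeps the destination column's nullability unless a mapping expression
    // exists for that column; then the expression decides, so a nullable expression result
    // can still be scanned even when the destination column is NOT NULL.
    static boolean scanSlotNullable(TargetColumn col, List<ColumnMapping> mappings) {
        for (ColumnMapping m : mappings) {
            if (!m.isPlainColumn() && Objects.equals(m.targetColumn(), col.name())) {
                return m.exprNullable();
            }
        }
        return col.allowNull();
    }
}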
diff --git a/regression-test/data/load_p0/stream_load/load_nullable_to_not_nullable.out b/regression-test/data/load_p0/stream_load/load_nullable_to_not_nullable.out
new file mode 100644
index 0000000000..e0838887b9
--- /dev/null
+++ b/regression-test/data/load_p0/stream_load/load_nullable_to_not_nullable.out
@@ -0,0 +1,4 @@
+-- This file is automatically generated. You should know what you did if you want to edit this
+-- !sql --
+2019 9 9 9 7.7 a 2019-09-09 1970-01-01T08:33:39 k7 9.0 9.0
+
diff --git a/regression-test/suites/load_p0/stream_load/data/test_time.data b/regression-test/suites/load_p0/stream_load/data/test_time.data
new file mode 100644
index 0000000000..7d0430441b
--- /dev/null
+++ b/regression-test/suites/load_p0/stream_load/data/test_time.data
@@ -0,0 +1 @@
+2019-09-09T10:10:10
\ No newline at end of file
diff --git a/regression-test/suites/load_p0/stream_load/load_nullable_to_not_nullable.groovy b/regression-test/suites/load_p0/stream_load/load_nullable_to_not_nullable.groovy
new file mode 100644
index 0000000000..5f410e2ee9
--- /dev/null
+++ b/regression-test/suites/load_p0/stream_load/load_nullable_to_not_nullable.groovy
@@ -0,0 +1,63 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+import org.codehaus.groovy.runtime.IOGroovyMethods
+
+import java.nio.charset.StandardCharsets
+
+suite("load_nullable_to_not_nullable") {
+ def tableName = "load_nullable_to_not_nullable"
+ def dbName = "test_query_db"
+ sql "CREATE DATABASE IF NOT EXISTS ${dbName}"
+ sql "USE $dbName"
+ sql "DROP TABLE IF EXISTS ${tableName} "
+ sql """
+ CREATE TABLE `${tableName}` (
+ k1 int(32) NOT NULL,
+ k2 smallint NOT NULL,
+ k3 int NOT NULL,
+ k4 bigint NOT NULL,
+ k5 decimal(9, 3) NOT NULL,
+ k6 char(5) NOT NULL,
+ k10 date NOT NULL,
+ k11 datetime NOT NULL,
+ k7 varchar(20) NOT NULL,
+ k8 double max NOT NULL,
+ k9 float sum NOT NULL )
+ AGGREGATE KEY(k1,k2,k3,k4,k5,k6,k10,k11,k7)
+ PARTITION BY RANGE(k2) (
+ PARTITION partition_a VALUES LESS THAN MAXVALUE
+ )
+ DISTRIBUTED BY HASH(k1, k2, k5)
+ BUCKETS 3
+ PROPERTIES ( "replication_allocation" = "tag.location.default: 1");
+ """
+
+ StringBuilder commandBuilder = new StringBuilder()
+ commandBuilder.append("""curl -v --location-trusted -u ${context.config.feHttpUser}:${context.config.feHttpPassword}""")
+ commandBuilder.append(""" -H columns:col,k1=year(col),k2=month(col),k3=month(col),k4=day(col),k5=7.7,k6='a',k10=date(col),k11=FROM_UNIXTIME(2019,'%Y-%m-%dT%H:%i:%s'),k7='k7',k8=month(col),k9=day(col) -T ${context.file.parent}/data/test_time.data http://${context.config.feHttpAddress}/api/""" + dbName + "/" + tableName + "/_stream_load")
+ String command = commandBuilder.toString()
+ def process = command.execute()
+ int code = process.waitFor()
+ String err = IOGroovyMethods.getText(new BufferedReader(new InputStreamReader(process.getErrorStream())));
+ String out = process.getText()
+ logger.info("Run command: command=" + command + ",code=" + code + ", out=" + out + ", err=" + err)
+ assertEquals(code, 0)
+ qt_sql " SELECT * FROM ${tableName} "
+ sql "DROP TABLE ${tableName} "
+}
+
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]