This is an automated email from the ASF dual-hosted git repository.
morningman pushed a commit to branch branch-3.1
in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/branch-3.1 by this push:
new 6299866a21d branch-3.1: [fix](paimon) get the correct schema when querying paimon table #55070 #55364 (#55404)
6299866a21d is described below
commit 6299866a21d636a69bf11b6ae7c60c5802909ce1
Author: Socrates <[email protected]>
AuthorDate: Thu Aug 28 14:15:57 2025 +0800
branch-3.1: [fix](paimon) get the correct schema when querying paimon table #55070 #55364 (#55404)
bp:
- get the correct schema when querying paimon table #55070
- [fix](multi-catalog) check paramType for TableScanParams #55364
---
.../create_preinstalled_scripts/paimon/run01.sql | 10 ++-
.../org/apache/doris/analysis/TableScanParams.java | 19 +++++-
.../org/apache/doris/common/util/TimeUtils.java | 12 ++++
.../datasource/paimon/PaimonExternalTable.java | 29 ++++++--
.../datasource/paimon/PaimonMetadataCache.java | 6 +-
.../apache/doris/datasource/paimon/PaimonUtil.java | 76 +++++++++++++++++++++
.../datasource/paimon/source/PaimonScanNode.java | 23 +++----
.../apache/doris/job/extensions/mtmv/MTMVTask.java | 2 +-
.../test_iceberg_schema_change_with_branch_tag.out | Bin 0 -> 320 bytes
.../paimon/test_paimon_schema_change.out | Bin 4751 -> 5013 bytes
.../iceberg/iceberg_query_tag_branch.groovy | 5 ++
...st_iceberg_schema_change_with_branch_tag.groovy | 67 ++++++++++++++++++
.../paimon/paimon_incr_read.groovy | 2 +-
.../paimon/paimon_time_travel.groovy | 11 ++-
.../paimon/test_paimon_schema_change.groovy | 10 ++-
15 files changed, 240 insertions(+), 32 deletions(-)
diff --git a/docker/thirdparties/docker-compose/iceberg/scripts/create_preinstalled_scripts/paimon/run01.sql b/docker/thirdparties/docker-compose/iceberg/scripts/create_preinstalled_scripts/paimon/run01.sql
index 422855d9872..7821cc46e0e 100644
--- a/docker/thirdparties/docker-compose/iceberg/scripts/create_preinstalled_scripts/paimon/run01.sql
+++ b/docker/thirdparties/docker-compose/iceberg/scripts/create_preinstalled_scripts/paimon/run01.sql
@@ -76,4 +76,12 @@ CREATE TABLE date_partition (
'file.format'='orc'
);
-insert into date_partition values(1,date '2020-01-01');
\ No newline at end of file
+insert into date_partition values(1,date '2020-01-01');
+
+drop table if exists test_schema_change;
+create table test_schema_change;
+alter table test_schema_change add column id int;
+insert into test_schema_change values(1);
+CALL sys.create_tag(table => 'test_schema_change', tag => 'tag1', snapshot => 1);
+CALL sys.create_branch('test_schema_change', 'branch1', 'tag1');
+alter table test_schema_change add column name string;
\ No newline at end of file
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/TableScanParams.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/TableScanParams.java
index 77f00af9fa5..a73b208a611 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/TableScanParams.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/TableScanParams.java
@@ -18,6 +18,7 @@
package org.apache.doris.analysis;
import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSet;
import org.bouncycastle.util.Strings;
import java.util.List;
@@ -25,9 +26,13 @@ import java.util.Map;
public class TableScanParams {
public static final String PARAMS_NAME = "name";
- public static String INCREMENTAL_READ = "incr";
- public static String BRANCH = "branch";
- public static String TAG = "tag";
+ public static final String INCREMENTAL_READ = "incr";
+ public static final String BRANCH = "branch";
+ public static final String TAG = "tag";
+ private static final ImmutableSet<String> VALID_PARAM_TYPES = ImmutableSet.of(
+ INCREMENTAL_READ,
+ BRANCH,
+ TAG);
private final String paramType;
// There are two ways to pass parameters to a function.
@@ -38,10 +43,18 @@ public class TableScanParams {
private final Map<String, String> mapParams;
private final List<String> listParams;
+ private void validate() {
+ if (!VALID_PARAM_TYPES.contains(paramType)) {
+ throw new IllegalArgumentException("Invalid param type: " + paramType);
+ }
+ // TODO: validate mapParams and listParams for different param types
+ }
+
public TableScanParams(String paramType, Map<String, String> mapParams, List<String> listParams) {
this.paramType = Strings.toLowerCase(paramType);
this.mapParams = mapParams == null ? ImmutableMap.of() : ImmutableMap.copyOf(mapParams);
this.listParams = listParams;
+ validate();
}
public List<String> getListParams() {
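For reference, a minimal standalone sketch (not part of the commit; the class name and map arguments are illustrative) of how the new validate() check behaves when constructing scan params:

    import org.apache.doris.analysis.TableScanParams;
    import com.google.common.collect.ImmutableMap;

    public class TableScanParamsCheck {
        public static void main(String[] args) {
            // "tag" is in VALID_PARAM_TYPES, so this constructs normally
            new TableScanParams("tag", ImmutableMap.of("name", "tag1"), null);
            // "brand" is not a valid param type, so the constructor now fails fast
            try {
                new TableScanParams("brand", ImmutableMap.of("name", "b1"), null);
            } catch (IllegalArgumentException e) {
                System.out.println(e.getMessage()); // Invalid param type: brand
            }
        }
    }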
diff --git a/fe/fe-core/src/main/java/org/apache/doris/common/util/TimeUtils.java b/fe/fe-core/src/main/java/org/apache/doris/common/util/TimeUtils.java
index 8d019be68b7..d4a5c51a50e 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/common/util/TimeUtils.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/common/util/TimeUtils.java
@@ -292,6 +292,18 @@ public class TimeUtils {
return d.atZone(timeZone.toZoneId()).toInstant().toEpochMilli();
}
+ public static long msTimeStringToLong(String timeStr, TimeZone timeZone) {
+ DateTimeFormatter dateFormatTimeZone = getDatetimeMsFormatWithTimeZone();
+ dateFormatTimeZone.withZone(timeZone.toZoneId());
+ LocalDateTime d;
+ try {
+ d = LocalDateTime.parse(timeStr, dateFormatTimeZone);
+ } catch (DateTimeParseException e) {
+ return -1;
+ }
+ return d.atZone(timeZone.toZoneId()).toInstant().toEpochMilli();
+ }
+
// Check if the time zone_value is valid
public static String checkTimeZoneValidAndStandardize(String value) throws DdlException {
Function<String, String> standardizeValue = s -> {
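A hedged usage sketch of the new helper (not part of the commit; the millisecond datetime format "yyyy-MM-dd HH:mm:ss.SSS" is an assumption based on getDatetimeMsFormatWithTimeZone):

    import org.apache.doris.common.util.TimeUtils;
    import java.util.TimeZone;

    public class MsTimeStringDemo {
        public static void main(String[] args) {
            TimeZone tz = TimeZone.getTimeZone("Asia/Shanghai");
            // parses a millisecond-precision datetime string into epoch milliseconds
            long ok = TimeUtils.msTimeStringToLong("2025-08-28 14:15:57.123", tz);
            // returns -1 instead of throwing when the string cannot be parsed
            long bad = TimeUtils.msTimeStringToLong("not-a-time", tz);
            System.out.println(ok + " " + bad);
        }
    }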
diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/PaimonExternalTable.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/PaimonExternalTable.java
index bdb45a0eb9f..15f83d0636c 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/PaimonExternalTable.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/PaimonExternalTable.java
@@ -54,6 +54,7 @@ import com.google.common.collect.Sets;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.paimon.CoreOptions;
+import org.apache.paimon.Snapshot;
import org.apache.paimon.partition.Partition;
import org.apache.paimon.schema.TableSchema;
import org.apache.paimon.table.DataTable;
@@ -98,10 +99,28 @@ public class PaimonExternalTable extends ExternalTable implements MTMVRelatedTab
return getOrFetchSnapshotCacheValue(snapshot).getSnapshot().getTable();
}
- private PaimonSnapshotCacheValue getPaimonSnapshotCacheValue() {
+ private PaimonSnapshotCacheValue getPaimonSnapshotCacheValue(Optional<TableSnapshot> tableSnapshot,
+ Optional<TableScanParams> scanParams) {
makeSureInitialized();
- return Env.getCurrentEnv().getExtMetaCacheMgr().getPaimonMetadataCache()
- .getPaimonSnapshot(this);
+ if (tableSnapshot.isPresent() || (scanParams.isPresent() && scanParams.get().isTag())) {
+ // If a snapshot is specified,
+ // use the specified snapshot and the corresponding schema(not the latest
+ // schema).
+ try {
+ Snapshot snapshot = PaimonUtil.getPaimonSnapshot(paimonTable, tableSnapshot, scanParams);
+ return new PaimonSnapshotCacheValue(PaimonPartitionInfo.EMPTY,
+ new PaimonSnapshot(snapshot.id(), snapshot.schemaId(), paimonTable));
+ } catch (Exception e) {
+ LOG.warn("Failed to get Paimon snapshot for table {}", paimonTable.name(), e);
+ throw new RuntimeException(
+ "Failed to get Paimon snapshot: " + (e.getMessage() == null ? "unknown cause" : e.getMessage()),
+ e);
+ }
+ } else {
+ // Otherwise, use the latest snapshot and the latest schema.
+ return Env.getCurrentEnv().getExtMetaCacheMgr().getPaimonMetadataCache()
+ .getPaimonSnapshot(this);
+ }
}
@Override
@@ -216,7 +235,7 @@ public class PaimonExternalTable extends ExternalTable implements MTMVRelatedTab
@Override
public MvccSnapshot loadSnapshot(Optional<TableSnapshot> tableSnapshot, Optional<TableScanParams> scanParams) {
- return new PaimonMvccSnapshot(getPaimonSnapshotCacheValue());
+ return new PaimonMvccSnapshot(getPaimonSnapshotCacheValue(tableSnapshot, scanParams));
}
@Override
@@ -278,7 +297,7 @@ public class PaimonExternalTable extends ExternalTable implements MTMVRelatedTab
if (snapshot.isPresent()) {
return ((PaimonMvccSnapshot) snapshot.get()).getSnapshotCacheValue();
} else {
- return getPaimonSnapshotCacheValue();
+ return getPaimonSnapshotCacheValue(Optional.empty(), Optional.empty());
}
}
diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/PaimonMetadataCache.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/PaimonMetadataCache.java
index 16cc042d580..dc387e54816 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/PaimonMetadataCache.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/PaimonMetadataCache.java
@@ -36,6 +36,8 @@ import org.apache.commons.collections.CollectionUtils;
import org.apache.paimon.CoreOptions;
import org.apache.paimon.Snapshot;
import org.apache.paimon.partition.Partition;
+import org.apache.paimon.schema.TableSchema;
+import org.apache.paimon.table.DataTable;
import org.apache.paimon.table.Table;
import org.jetbrains.annotations.NotNull;
@@ -115,15 +117,15 @@ public class PaimonMetadataCache {
Table snapshotTable = table;
// snapshotId and schemaId
Long latestSnapshotId = PaimonSnapshot.INVALID_SNAPSHOT_ID;
- long latestSchemaId = 0L;
Optional<Snapshot> optionalSnapshot = table.latestSnapshot();
if (optionalSnapshot.isPresent()) {
latestSnapshotId = optionalSnapshot.get().id();
- latestSchemaId = table.snapshot(latestSnapshotId).schemaId();
snapshotTable = table.copy(Collections.singletonMap(CoreOptions.SCAN_SNAPSHOT_ID.key(), latestSnapshotId.toString()));
}
+ DataTable dataTable = (DataTable) table;
+ long latestSchemaId = dataTable.schemaManager().latest().map(TableSchema::id).orElse(0L);
return new PaimonSnapshot(latestSnapshotId, latestSchemaId, snapshotTable);
}
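In short, the latest schema id now comes from the schema manager rather than from the latest snapshot, so a table that was ALTERed but never written still reports its newest schema. A standalone sketch of that lookup (illustrative only, mirroring the added lines above):

    import org.apache.paimon.schema.TableSchema;
    import org.apache.paimon.table.DataTable;
    import org.apache.paimon.table.Table;

    public class LatestSchemaIdSketch {
        // falls back to schema id 0 only when the table has no schema at all
        static long latestSchemaId(Table table) {
            DataTable dataTable = (DataTable) table;
            return dataTable.schemaManager().latest().map(TableSchema::id).orElse(0L);
        }
    }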
diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/PaimonUtil.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/PaimonUtil.java
index 72f5adad999..eca3a66f552 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/PaimonUtil.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/PaimonUtil.java
@@ -28,6 +28,7 @@ import org.apache.doris.catalog.ScalarType;
import org.apache.doris.catalog.Type;
import org.apache.doris.common.AnalysisException;
import org.apache.doris.common.UserException;
+import org.apache.doris.common.util.TimeUtils;
import org.apache.doris.datasource.ExternalTable;
import org.apache.doris.datasource.hive.HiveUtil;
import org.apache.doris.datasource.paimon.source.PaimonSource;
@@ -49,6 +50,7 @@ import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.paimon.CoreOptions;
import org.apache.paimon.CoreOptions.StartupMode;
+import org.apache.paimon.Snapshot;
import org.apache.paimon.data.BinaryRow;
import org.apache.paimon.data.InternalRow;
import org.apache.paimon.data.Timestamp;
@@ -58,9 +60,11 @@ import org.apache.paimon.partition.Partition;
import org.apache.paimon.predicate.Predicate;
import org.apache.paimon.reader.RecordReader;
import org.apache.paimon.schema.TableSchema;
+import org.apache.paimon.table.DataTable;
import org.apache.paimon.table.FileStoreTable;
import org.apache.paimon.table.Table;
import org.apache.paimon.table.source.ReadBuilder;
+import org.apache.paimon.tag.Tag;
import org.apache.paimon.types.ArrayType;
import org.apache.paimon.types.CharType;
import org.apache.paimon.types.DataField;
@@ -75,8 +79,10 @@ import org.apache.paimon.utils.Pair;
import org.apache.paimon.utils.Projection;
import org.apache.paimon.utils.RowDataToObjectArrayConverter;
+import java.io.FileNotFoundException;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
+import java.time.DateTimeException;
import java.time.LocalDate;
import java.time.LocalTime;
import java.time.ZoneId;
@@ -87,6 +93,7 @@ import java.util.Base64;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import java.util.Optional;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import javax.annotation.Nullable;
@@ -577,6 +584,75 @@ public class PaimonUtil {
return baseTable.copy(options);
}
+ // get snapshot info from query like 'for version/time as of' or '@tag'
+ public static Snapshot getPaimonSnapshot(Table table, Optional<TableSnapshot> querySnapshot,
+ Optional<TableScanParams> scanParams) throws UserException {
+ Preconditions.checkArgument(querySnapshot.isPresent() || (scanParams.isPresent() && scanParams.get().isTag()),
+ "should spec version or time or tag");
+ Preconditions.checkArgument(!(querySnapshot.isPresent() && scanParams.isPresent()),
+ "should not spec both snapshot and scan params");
+
+ DataTable dataTable = (DataTable) table;
+ if (querySnapshot.isPresent()) {
+ return getPaimonSnapshotByTableSnapshot(dataTable, querySnapshot.get());
+ } else if (scanParams.isPresent() && scanParams.get().isTag()) {
+ return getPaimonSnapshotByTag(dataTable, extractBranchOrTagName(scanParams.get()));
+ } else {
+ throw new UserException("should spec version or time or tag");
+ }
+ }
+
+ private static Snapshot getPaimonSnapshotByTableSnapshot(DataTable table, TableSnapshot tableSnapshot)
+ throws UserException {
+ final String value = tableSnapshot.getValue();
+ final TableSnapshot.VersionType type = tableSnapshot.getType();
+ final boolean isDigital = DIGITAL_REGEX.matcher(value).matches();
+
+ switch (type) {
+ case TIME:
+ return getPaimonSnapshotByTimestamp(table, value, isDigital);
+ case VERSION:
+ return isDigital ? getPaimonSnapshotBySnapshotId(table, value) : getPaimonSnapshotByTag(table, value);
+ default:
+ throw new UserException("Unsupported snapshot type: " + type);
+ }
+ }
+
+ private static Snapshot getPaimonSnapshotByTimestamp(DataTable table, String timestamp, boolean isDigital)
+ throws UserException {
+ long timestampMillis = 0;
+ if (isDigital) {
+ timestampMillis = Long.parseLong(timestamp);
+ } else {
+ timestampMillis = TimeUtils.msTimeStringToLong(timestamp, TimeUtils.getTimeZone());
+ if (timestampMillis < 0) {
+ throw new DateTimeException("can't parse time: " + timestamp);
+ }
+ }
+ Snapshot snapshot = table.snapshotManager().earlierOrEqualTimeMills(timestampMillis);
+ if (snapshot == null) {
+ throw new UserException("can't find snapshot older than : " + timestamp);
+ }
+ return snapshot;
+ }
+
+ private static Snapshot getPaimonSnapshotBySnapshotId(DataTable table, String snapshotString)
+ throws UserException {
+ long snapshotId = Long.parseLong(snapshotString);
+ try {
+ Snapshot snapshot = table.snapshotManager().tryGetSnapshot(snapshotId);
+ return snapshot;
+ } catch (FileNotFoundException e) {
+ throw new UserException("can't find snapshot by id: " + snapshotId, e);
+ }
+ }
+
+ private static Snapshot getPaimonSnapshotByTag(DataTable table, String tagName)
+ throws UserException {
+ Optional<Tag> tag = table.tagManager().get(tagName);
+ return tag.orElseThrow(() -> new UserException("can't find snapshot by tag: " + tagName));
+ }
+
/**
* Creates a map of conflicting Paimon options with null values for exclusion.
*
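Taken together, the new entry point resolves the snapshot a query pins to, so callers can use that snapshot's schema id instead of the latest table schema. A hedged sketch of the intended call pattern (the wrapper method below is illustrative, not part of the commit):

    import org.apache.doris.analysis.TableScanParams;
    import org.apache.doris.analysis.TableSnapshot;
    import org.apache.doris.common.UserException;
    import org.apache.doris.datasource.paimon.PaimonUtil;
    import org.apache.paimon.Snapshot;
    import org.apache.paimon.table.Table;

    import java.util.Optional;

    public class PinnedSchemaSketch {
        // Exactly one of tableSnapshot ("for version/time as of ...") or a tag-type
        // scanParams ("@tag(...)") must be present, as the Preconditions above enforce.
        static long pinnedSchemaId(Table paimonTable, Optional<TableSnapshot> tableSnapshot,
                Optional<TableScanParams> scanParams) throws UserException {
            Snapshot snapshot = PaimonUtil.getPaimonSnapshot(paimonTable, tableSnapshot, scanParams);
            return snapshot.schemaId();
        }
    }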
diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/source/PaimonScanNode.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/source/PaimonScanNode.java
index a4b2902eb4e..7dba764d621 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/source/PaimonScanNode.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/paimon/source/PaimonScanNode.java
@@ -49,7 +49,6 @@ import org.apache.doris.thrift.TTableFormatFileDesc;
import com.google.common.annotations.VisibleForTesting;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
-import org.apache.paimon.CoreOptions;
import org.apache.paimon.data.BinaryRow;
import org.apache.paimon.predicate.Predicate;
import org.apache.paimon.schema.TableSchema;
@@ -383,20 +382,16 @@ public class PaimonScanNode extends FileQueryScanNode {
@VisibleForTesting
public List<org.apache.paimon.table.source.Split> getPaimonSplitFromAPI() throws UserException {
- if (!source.getPaimonTable().options().containsKey(CoreOptions.SCAN_SNAPSHOT_ID.key())) {
- // an empty table in PaimonSnapshotCacheValue
- return Collections.emptyList();
- }
+ Table paimonTable = getProcessedTable();
int[] projected = desc.getSlots().stream().mapToInt(
- slot -> source.getPaimonTable().rowType()
- .getFieldNames()
- .stream()
- .map(String::toLowerCase)
- .collect(Collectors.toList())
- .indexOf(slot.getColumn().getName()))
+ slot -> paimonTable.rowType()
+ .getFieldNames()
+ .stream()
+ .map(String::toLowerCase)
+ .collect(Collectors.toList())
+ .indexOf(slot.getColumn().getName()))
+ .filter(i -> i >= 0)
.toArray();
-
- Table paimonTable = getProcessedTable();
ReadBuilder readBuilder = paimonTable.newReadBuilder();
return readBuilder.withFilter(predicates)
.withProjection(projected)
@@ -712,5 +707,3 @@ public class PaimonScanNode extends FileQueryScanNode {
return baseTable;
}
}
-
-
diff --git a/fe/fe-core/src/main/java/org/apache/doris/job/extensions/mtmv/MTMVTask.java b/fe/fe-core/src/main/java/org/apache/doris/job/extensions/mtmv/MTMVTask.java
index 4df48187cb3..d0ec0f47ede 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/job/extensions/mtmv/MTMVTask.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/job/extensions/mtmv/MTMVTask.java
@@ -469,7 +469,7 @@ public class MTMVTask extends AbstractTask {
}
if (tableIf instanceof MvccTable) {
MvccTable mvccTable = (MvccTable) tableIf;
- MvccSnapshot mvccSnapshot = mvccTable.loadSnapshot(Optional.empty(), null);
+ MvccSnapshot mvccSnapshot = mvccTable.loadSnapshot(Optional.empty(), Optional.empty());
snapshots.put(new MvccTableInfo(mvccTable), mvccSnapshot);
}
}
diff --git a/regression-test/data/external_table_p0/iceberg/test_iceberg_schema_change_with_branch_tag.out b/regression-test/data/external_table_p0/iceberg/test_iceberg_schema_change_with_branch_tag.out
new file mode 100644
index 00000000000..97e995bc6dc
Binary files /dev/null and b/regression-test/data/external_table_p0/iceberg/test_iceberg_schema_change_with_branch_tag.out differ
diff --git a/regression-test/data/external_table_p0/paimon/test_paimon_schema_change.out b/regression-test/data/external_table_p0/paimon/test_paimon_schema_change.out
index fed740a8803..90b56b63a3d 100644
Binary files a/regression-test/data/external_table_p0/paimon/test_paimon_schema_change.out and b/regression-test/data/external_table_p0/paimon/test_paimon_schema_change.out differ
diff --git a/regression-test/suites/external_table_p0/iceberg/iceberg_query_tag_branch.groovy b/regression-test/suites/external_table_p0/iceberg/iceberg_query_tag_branch.groovy
index 4062a6df649..1fb21cb8c27 100644
--- a/regression-test/suites/external_table_p0/iceberg/iceberg_query_tag_branch.groovy
+++ b/regression-test/suites/external_table_p0/iceberg/iceberg_query_tag_branch.groovy
@@ -220,6 +220,11 @@ suite("iceberg_query_tag_branch", "p0,external,doris,external_docker,external_do
sql """ select * from tag_branch_table@tag(b1) ; """
exception "does not have tag named b1"
}
+
+ test {
+ sql """ select * from tag_branch_table@brand(b1) ; """
+ exception "Invalid param type: brand"
+ }
}
try {
diff --git a/regression-test/suites/external_table_p0/iceberg/test_iceberg_schema_change_with_branch_tag.groovy b/regression-test/suites/external_table_p0/iceberg/test_iceberg_schema_change_with_branch_tag.groovy
new file mode 100644
index 00000000000..470499c8f4b
--- /dev/null
+++ b/regression-test/suites/external_table_p0/iceberg/test_iceberg_schema_change_with_branch_tag.groovy
@@ -0,0 +1,67 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+
+
+suite("iceberg_schema_change_with_branch_tag", "p0,external,doris,external_docker,external_docker_doris") {
+
+ String enabled = context.config.otherConfigs.get("enableIcebergTest")
+ if (enabled == null || !enabled.equalsIgnoreCase("true")) {
+ logger.info("disable iceberg test.")
+ return
+ }
+
+ String rest_port = context.config.otherConfigs.get("iceberg_rest_uri_port")
+ String minio_port = context.config.otherConfigs.get("iceberg_minio_port")
+ String externalEnvIp = context.config.otherConfigs.get("externalEnvIp")
+ String catalog_name = "iceberg_schema_change_with_branch_tag"
+
+ sql """drop catalog if exists ${catalog_name}"""
+ sql """
+ CREATE CATALOG ${catalog_name} PROPERTIES (
+ 'type'='iceberg',
+ 'iceberg.catalog.type'='rest',
+ 'uri' = 'http://${externalEnvIp}:${rest_port}',
+ "s3.access_key" = "admin",
+ "s3.secret_key" = "password",
+ "s3.endpoint" = "http://${externalEnvIp}:${minio_port}",
+ "s3.region" = "us-east-1"
+ );"""
+
+ logger.info("catalog " + catalog_name + " created")
+ sql """switch ${catalog_name};"""
+ logger.info("switched to catalog " + catalog_name)
+ sql """ use test_db;"""
+ // init table test_schema_change_with_branch_tag
+ sql """ drop table if exists test_schema_change_with_branch_tag; """
+ sql """ create table test_schema_change_with_branch_tag (id int); """
+ sql """ insert into test_schema_change_with_branch_tag values (1), (2), (3); """
+ // create branch and tag
+ sql """ alter table test_schema_change_with_branch_tag create branch test_branch; """
+ sql """ alter table test_schema_change_with_branch_tag create tag test_tag; """
+ // schema change but no data inserted, so no new snapshot
+ sql """ alter table test_schema_change_with_branch_tag add column name string; """
+
+ // this should get latest schema
+ qt_desc_schema """ desc test_schema_change_with_branch_tag; """
+ qt_select_table """ select * from test_schema_change_with_branch_tag order by id; """
+
+ qt_select_branch """ select * from test_schema_change_with_branch_tag FOR VERSION AS OF 'test_branch' """
+ qt_select_branch2 """ select * from test_schema_change_with_branch_tag@branch(test_branch) """
+ qt_select_tag """ select * from test_schema_change_with_branch_tag FOR VERSION AS OF 'test_tag' """
+ qt_select_tag2 """ select * from test_schema_change_with_branch_tag@tag(test_tag) """
+}
diff --git a/regression-test/suites/external_table_p0/paimon/paimon_incr_read.groovy b/regression-test/suites/external_table_p0/paimon/paimon_incr_read.groovy
index bfc2457c6a3..08a271e054b 100644
--- a/regression-test/suites/external_table_p0/paimon/paimon_incr_read.groovy
+++ b/regression-test/suites/external_table_p0/paimon/paimon_incr_read.groovy
@@ -94,7 +94,7 @@ suite("test_paimon_incr_read", "p0,external,doris,external_docker,external_docke
}
test {
sql """select * from paimon_incr@incr('startSnapshotId'=1, 'endSnapshotId'=2) for version as of 1"""
- exception "Can not specify scan params and table snapshot"
+ exception "should not spec both snapshot and scan params"
}
}
diff --git a/regression-test/suites/external_table_p0/paimon/paimon_time_travel.groovy b/regression-test/suites/external_table_p0/paimon/paimon_time_travel.groovy
index b6d3caddb8c..99a4e94a4c6 100644
--- a/regression-test/suites/external_table_p0/paimon/paimon_time_travel.groovy
+++ b/regression-test/suites/external_table_p0/paimon/paimon_time_travel.groovy
@@ -249,15 +249,15 @@ suite("paimon_time_travel", "p0,external,doris,external_docker,external_docker_d
}
test {
sql """ select * from ${tableName}@tag('name'='not_exists_tag'); """
- exception "Tag 'not_exists_tag' does not exist"
+ exception "can't find snapshot by tag: not_exists_tag"
}
test {
sql """ select * from ${tableName}@tag(not_exists_tag); """
- exception "Tag 'not_exists_tag' does not exist"
+ exception "can't find snapshot by tag: not_exists_tag"
}
test {
sql """ select * from ${tableName} for version as of 'not_exists_tag'; """
- exception "Tag 'not_exists_tag' does not exist"
+ exception "can't find snapshot by tag: not_exists_tag"
}
// Use branch function to query tags
@@ -270,6 +270,11 @@ suite("paimon_time_travel", "p0,external,doris,external_docker,external_docker_d
exception "must contain key 'name' in params"
}
+ test {
+ sql """ select * from ${tableName}@brand('nme'='not_exists_branch'); """
+ exception "Invalid param type: brand"
+ }
+
} finally {
// sql """drop catalog if exists ${catalog_name}"""
}
diff --git a/regression-test/suites/external_table_p0/paimon/test_paimon_schema_change.groovy b/regression-test/suites/external_table_p0/paimon/test_paimon_schema_change.groovy
index 2e9f9790a28..04a7200d6c2 100644
--- a/regression-test/suites/external_table_p0/paimon/test_paimon_schema_change.groovy
+++ b/regression-test/suites/external_table_p0/paimon/test_paimon_schema_change.groovy
@@ -81,7 +81,15 @@ suite("test_paimon_schema_change", "p0,external,doris,external_docker,external_d
qt_count_3 """ select count(*) from sc_parquet;"""
qt_count_4 """ select count(*) from sc_orc;"""
-
+ // should get latest schema
+ qt_desc_latest_schema """ desc test_paimon_spark.test_schema_change; """
+ qt_query_latest_schema """ SELECT * FROM test_paimon_spark.test_schema_change; """
+ // should get latest schema for branch
+ qt_time_travel_schema_branch """ select * from test_paimon_spark.test_schema_change@branch(branch1); """
+ // should get the schema in tag
+ qt_time_travel_schema_tag """ select * from test_paimon_spark.test_schema_change for version as of 'tag1'; """
+ qt_time_travel_schema_tag2 """ select * from test_paimon_spark.test_schema_change@tag(tag1); """
+ qt_time_travel_schema_snapshot """ select * from test_paimon_spark.test_schema_change for version as of '1'; """
}
}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]