This is an automated email from the ASF dual-hosted git repository.
yiguolei pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/master by this push:
new a07f59de8c [Fix](multi-catalog) Fix hadoop viewfs issues. (#24507)
a07f59de8c is described below
commit a07f59de8c45ac26bfd0140c8cc7fdd75c838bf3
Author: Qi Chen <[email protected]>
AuthorDate: Mon Sep 18 09:51:33 2023 +0800
[Fix](multi-catalog) Fix hadoop viewfs issues. (#24507)
Error Msg:

Caused by: org.apache.doris.datasource.CacheException: failed to get input splits for FileCacheKey{location='viewfs://my-cluster/ns1/usr/hive/warehouse/viewfs.db/parquet_table', inputFormat='org.apache.hadoop.hive.ql.io.parquet.MapredParquetInputFormat'} in catalog test_viewfs_hive
    at org.apache.doris.datasource.hive.HiveMetaStoreCache.loadFiles(HiveMetaStoreCache.java:466) ~[doris-fe.jar:1.2-SNAPSHOT]
    at org.apache.doris.datasource.hive.HiveMetaStoreCache.access$400(HiveMetaStoreCache.java:112) ~[doris-fe.jar:1.2-SNAPSHOT]
    at org.apache.doris.datasource.hive.HiveMetaStoreCache$3.load(HiveMetaStoreCache.java:210) ~[doris-fe.jar:1.2-SNAPSHOT]
    at org.apache.doris.datasource.hive.HiveMetaStoreCache$3.load(HiveMetaStoreCache.java:202) ~[doris-fe.jar:1.2-SNAPSHOT]
    at org.apache.doris.common.util.CacheBulkLoader.lambda$null$0(CacheBulkLoader.java:42) ~[doris-fe.jar:1.2-SNAPSHOT]
    at java.util.concurrent.FutureTask.run(FutureTask.java:266) ~[?:1.8.0_131]
    ... 3 more
Caused by: org.apache.doris.common.UserException: errCode = 2, detailMessage = Failed to list located status for path: viewfs://my-cluster/ns1/usr/hive/warehouse/viewfs.db/parquet_table
    at org.apache.doris.fs.remote.RemoteFileSystem.listLocatedFiles(RemoteFileSystem.java:54) ~[doris-fe.jar:1.2-SNAPSHOT]
    at org.apache.doris.datasource.hive.HiveMetaStoreCache.getFileCache(HiveMetaStoreCache.java:381) ~[doris-fe.jar:1.2-SNAPSHOT]
    at org.apache.doris.datasource.hive.HiveMetaStoreCache.loadFiles(HiveMetaStoreCache.java:432) ~[doris-fe.jar:1.2-SNAPSHOT]
    at org.apache.doris.datasource.hive.HiveMetaStoreCache.access$400(HiveMetaStoreCache.java:112) ~[doris-fe.jar:1.2-SNAPSHOT]
    at org.apache.doris.datasource.hive.HiveMetaStoreCache$3.load(HiveMetaStoreCache.java:210) ~[doris-fe.jar:1.2-SNAPSHOT]
    at org.apache.doris.datasource.hive.HiveMetaStoreCache$3.load(HiveMetaStoreCache.java:202) ~[doris-fe.jar:1.2-SNAPSHOT]
    at org.apache.doris.common.util.CacheBulkLoader.lambda$null$0(CacheBulkLoader.java:42) ~[doris-fe.jar:1.2-SNAPSHOT]
    at java.util.concurrent.FutureTask.run(FutureTask.java:266) ~[?:1.8.0_131]
    ... 3 more
Caused by: java.nio.file.AccessDeniedException: viewfs://my-cluster/ns1/usr/hive/warehouse/viewfs.db/parquet_table: org.apache.hadoop.fs.s3a.auth.NoAuthWithAWSException: No AWS Credentials provided by TemporaryAWSCredentialsProvider SimpleAWSCredentialsProvider EnvironmentVariableCredentialsProvider IAMInstanceCredentialsProvider : com.amazonaws.SdkClientException: Unable to load AWS credentials from environment variables (AWS_ACCESS_KEY_ID (or AWS_ACCESS_KEY) and AWS_SECRET_KEY (or A [...]
    at org.apache.hadoop.fs.s3a.S3AUtils.translateException(S3AUtils.java:215) ~[hadoop-aws-3.3.6.jar:?]
    at org.apache.hadoop.fs.s3a.Invoker.onceInTheFuture(Invoker.java:190) ~[hadoop-aws-3.3.6.jar:?]
    at org.apache.hadoop.fs.s3a.Listing$ObjectListingIterator.next(Listing.java:651) ~[hadoop-aws-3.3.6.jar:?]
    at org.apache.hadoop.fs.s3a.Listing$FileStatusListingIterator.requestNextBatch(Listing.java:430) ~[hadoop-aws-3.3.6.jar:?]
    at org.apache.hadoop.fs.s3a.Listing$FileStatusListingIterator.<init>(Listing.java:372) ~[hadoop-aws-3.3.6.jar:?]
    at org.apache.hadoop.fs.s3a.Listing.createFileStatusListingIterator(Listing.java:143) ~[hadoop-aws-3.3.6.jar:?]
    at org.apache.hadoop.fs.s3a.Listing.getListFilesAssumingDir(Listing.java:211) ~[hadoop-aws-3.3.6.jar:?]
    at org.apache.hadoop.fs.s3a.S3AFileSystem.innerListFiles(S3AFileSystem.java:4898) ~[hadoop-aws-3.3.6.jar:?]
    at org.apache.hadoop.fs.s3a.S3AFileSystem.lambda$listFiles$38(S3AFileSystem.java:4840) ~[hadoop-aws-3.3.6.jar:?]
    at org.apache.hadoop.fs.statistics.impl.IOStatisticsBinding.invokeTrackingDuration(IOStatisticsBinding.java:547) ~[hadoop-common-3.3.6.jar:?]
    at org.apache.hadoop.fs.statistics.impl.IOStatisticsBinding.lambda$trackDurationOfOperation$5(IOStatisticsBinding.java:528) ~[hadoop-common-3.3.6.jar:?]
    at org.apache.hadoop.fs.statistics.impl.IOStatisticsBinding.trackDuration(IOStatisticsBinding.java:449) ~[hadoop-common-3.3.6.jar:?]
    at org.apache.hadoop.fs.s3a.S3AFileSystem.trackDurationAndSpan(S3AFileSystem.java:2480) ~[hadoop-aws-3.3.6.jar:?]
    at org.apache.hadoop.fs.s3a.S3AFileSystem.trackDurationAndSpan(S3AFileSystem.java:2499) ~[hadoop-aws-3.3.6.jar:?]
    at org.apache.hadoop.fs.s3a.S3AFileSystem.listFiles(S3AFileSystem.java:4839) ~[hadoop-aws-3.3.6.jar:?]
    at org.apache.doris.fs.remote.RemoteFileSystem.listLocatedFiles(RemoteFileSystem.java:50) ~[doris-fe.jar:1.2-SNAPSHOT]
    at org.apache.doris.datasource.hive.HiveMetaStoreCache.getFileCache(HiveMetaStoreCache.java:381) ~[doris-fe.jar:1.2-SNAPSHOT]
    at org.apache.doris.datasource.hive.HiveMetaStoreCache.loadFiles(HiveMetaStoreCache.java:432) ~[doris-fe.jar:1.2-SNAPSHOT]
    at org.apache.doris.datasource.hive.HiveMetaStoreCache.access$400(HiveMetaStoreCache.java:112) ~[doris-fe.jar:1.2-SNAPSHOT]
    at org.apache.doris.datasource.hive.HiveMetaStoreCache$3.load(HiveMetaStoreCache.java:210) ~[doris-fe.jar:1.2-SNAPSHOT]
    at org.apache.doris.datasource.hive.HiveMetaStoreCache$3.load(HiveMetaStoreCache.java:202) ~[doris-fe.jar:1.2-SNAPSHOT]
    at org.apache.doris.common.util.CacheBulkLoader.lambda$null$0(CacheBulkLoader.java:42) ~[doris-fe.jar:1.2-SNAPSHOT]
    at java.util.concurrent.FutureTask.run(FutureTask.java:266) ~[?:1.8.0_131]
    ... 3 more
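
For context: the listing fails because, before this patch, the FE forced an fs.<scheme>.impl override for every scheme other than "hdfs" (see the HiveMetaStoreCache hunk below), so a viewfs:// location was not handled by ViewFileSystem; the S3AFileSystem frames in the trace show the list call ending up in the S3A connector, which then fails for lack of AWS credentials. Below is a minimal standalone sketch of the condition that changes. ViewfsSchemeDemo is a hypothetical class written only for illustration, not Doris code; it assumes only org.apache.hadoop.conf.Configuration from hadoop-common.

    // Hypothetical illustration of the pre-fix vs. post-fix scheme check; not Doris code.
    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;

    public class ViewfsSchemeDemo {
        public static void main(String[] args) {
            URI location = URI.create(
                    "viewfs://my-cluster/ns1/usr/hive/warehouse/viewfs.db/parquet_table");
            String scheme = location.getScheme();   // "viewfs"
            Configuration conf = new Configuration();
            conf.set("fs." + scheme + ".impl.disable.cache", "true");

            // Pre-fix condition: only "hdfs" is exempted, so "viewfs" falls through
            // and fs.viewfs.impl is overridden (the trace above shows the listing
            // then landing in S3AFileSystem rather than ViewFileSystem).
            if (!scheme.equals("hdfs")) {
                System.out.println("pre-fix: would override fs." + scheme + ".impl");
            }

            // Post-fix condition (see the HiveMetaStoreCache hunk below): "viewfs"
            // is also exempted, so the viewfs mount table from the cluster
            // configuration keeps being used.
            if (!scheme.equals("hdfs") && !scheme.equals("viewfs")) {
                System.out.println("post-fix: would override fs." + scheme + ".impl");
            }
        }
    }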
---
.../src/main/java/org/apache/doris/analysis/StorageBackend.java | 5 +++--
.../java/org/apache/doris/datasource/hive/HiveMetaStoreCache.java | 2 +-
2 files changed, 4 insertions(+), 3 deletions(-)
diff --git a/fe/fe-core/src/main/java/org/apache/doris/analysis/StorageBackend.java b/fe/fe-core/src/main/java/org/apache/doris/analysis/StorageBackend.java
index f3d7f7e49f..e881154924 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/analysis/StorageBackend.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/analysis/StorageBackend.java
@@ -65,8 +65,9 @@ public class StorageBackend implements ParseNode {
             }
         } else if (type == StorageBackend.StorageType.S3 && !schema.equalsIgnoreCase("s3")) {
             throw new AnalysisException("Invalid export path. please use valid 's3://' path.");
-        } else if (type == StorageBackend.StorageType.HDFS && !schema.equalsIgnoreCase("hdfs")) {
-            throw new AnalysisException("Invalid export path. please use valid 'HDFS://' path.");
+        } else if (type == StorageBackend.StorageType.HDFS && !schema.equalsIgnoreCase("hdfs")
+                && !schema.equalsIgnoreCase("viewfs")) {
+            throw new AnalysisException("Invalid export path. please use valid 'HDFS://' or 'viewfs://' path.");
         } else if (type == StorageBackend.StorageType.LOCAL && !schema.equalsIgnoreCase("file")) {
             throw new AnalysisException(
                     "Invalid export path. please use valid '" + OutFileClause.LOCAL_FILE_PREFIX + "' path.");
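
A hedged sketch of what the export-path check accepts after this hunk. ExportSchemeCheckDemo and isValidHdfsExportScheme are made-up names, not the real StorageBackend API; the helper just mirrors the new condition for HDFS-type paths.

    // Made-up helper mirroring the new condition; not the real StorageBackend code.
    public class ExportSchemeCheckDemo {
        static boolean isValidHdfsExportScheme(String schema) {
            return schema.equalsIgnoreCase("hdfs") || schema.equalsIgnoreCase("viewfs");
        }

        public static void main(String[] args) {
            System.out.println(isValidHdfsExportScheme("hdfs"));    // true
            System.out.println(isValidHdfsExportScheme("viewfs"));  // true, newly accepted by this patch
            System.out.println(isValidHdfsExportScheme("s3"));      // false, still rejected for HDFS type
        }
    }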
diff --git a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreCache.java b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreCache.java
index ec1704547a..0a85d9ff5b 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreCache.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreCache.java
@@ -416,7 +416,7 @@ public class HiveMetaStoreCache {
             if (uri.getScheme() != null) {
                 String scheme = uri.getScheme();
                 updateJobConf("fs." + scheme + ".impl.disable.cache", "true");
-                if (!scheme.equals("hdfs")) {
+                if (!scheme.equals("hdfs") && !scheme.equals("viewfs")) {
                     updateJobConf("fs." + scheme + ".impl", PropertyConverter.getHadoopFSImplByScheme(scheme));
                 }
             }
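
With fs.viewfs.impl no longer overridden, a viewfs:// location resolves through Hadoop's client-side mount table as usual. A minimal sketch of the kind of entry it relies on follows; the namespace and target names are hypothetical and set programmatically only for illustration, since real deployments define them in core-site.xml.

    // Illustrative only: hypothetical viewfs mount-table entry, normally in core-site.xml.
    import org.apache.hadoop.conf.Configuration;

    public class ViewfsMountTableDemo {
        public static void main(String[] args) {
            Configuration conf = new Configuration();
            // Hypothetical mapping: viewfs://my-cluster/ns1/... -> hdfs://nameservice1/...
            conf.set("fs.viewfs.mounttable.my-cluster.link./ns1", "hdfs://nameservice1/");
            // Because this patch stops forcing fs.viewfs.impl to another FileSystem,
            // listing viewfs://my-cluster/ns1/usr/hive/warehouse/viewfs.db/parquet_table
            // goes through ViewFileSystem and a mapping like the one above,
            // not through the S3A connector seen in the stack trace.
            System.out.println(conf.get("fs.viewfs.mounttable.my-cluster.link./ns1"));
        }
    }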
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]