This is an automated email from the ASF dual-hosted git repository.
kirs pushed a commit to branch branch-2.1
in repository https://gitbox.apache.org/repos/asf/doris.git
The following commit(s) were added to refs/heads/branch-2.1 by this push:
new 8ac54b63c59 branch-2.1 [Fix](Catalog)Standardize the use of
authentication attributes in the Catalog to avoid creating a large number of
Authenticator objects. (#46106)
8ac54b63c59 is described below
commit 8ac54b63c5919d7f5b547ecc487203d35b053041
Author: Calvin Kirs <[email protected]>
AuthorDate: Fri Dec 27 20:36:12 2024 +0800
branch-2.1 [Fix](Catalog)Standardize the use of authentication attributes
in the Catalog to avoid creating a large number of Authenticator objects.
(#46106)
Standardize the use of authentication attributes in the Catalog to avoid
creating a large number of Authenticator objects.
#46052
---
.../doris/datasource/hive/HMSExternalCatalog.java | 13 +++++----
.../datasource/hive/HiveMetaStoreClientHelper.java | 6 -----
.../apache/doris/datasource/hudi/HudiUtils.java | 23 +++++++++-------
.../doris/datasource/hudi/source/HudiScanNode.java | 20 +++++++++-----
.../datasource/iceberg/IcebergMetadataCache.java | 11 +++++---
.../doris/datasource/iceberg/IcebergUtils.java | 31 +++++++++++++---------
.../datasource/iceberg/source/IcebergScanNode.java | 12 ++++++---
7 files changed, 66 insertions(+), 50 deletions(-)
diff --git
a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java
b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java
index 7498e6edd93..6c320aa4396 100644
---
a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java
+++
b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HMSExternalCatalog.java
@@ -101,12 +101,10 @@ public class HMSExternalCatalog extends ExternalCatalog {
* Default constructor for HMSExternalCatalog.
*/
public HMSExternalCatalog(long catalogId, String name, String resource,
Map<String, String> props,
- String comment) {
+ String comment) {
super(catalogId, name, InitCatalogLog.Type.HMS, comment);
props = PropertyConverter.convertToMetaProperties(props);
catalogProperty = new CatalogProperty(resource, props);
- AuthenticationConfig config =
AuthenticationConfig.getKerberosConfig(getConfiguration());
- authenticator = HadoopAuthenticator.getHadoopAuthenticator(config);
}
@Override
@@ -170,10 +168,11 @@ public class HMSExternalCatalog extends ExternalCatalog {
@Override
protected void initLocalObjectsImpl() {
- preExecutionAuthenticator = new PreExecutionAuthenticator();
- if (authenticator == null) {
+ this.preExecutionAuthenticator = new PreExecutionAuthenticator();
+ if (this.authenticator == null) {
AuthenticationConfig config =
AuthenticationConfig.getKerberosConfig(getConfiguration());
- authenticator = HadoopAuthenticator.getHadoopAuthenticator(config);
+ this.authenticator =
HadoopAuthenticator.getHadoopAuthenticator(config);
+
this.preExecutionAuthenticator.setHadoopAuthenticator(authenticator);
}
HiveConf hiveConf = null;
@@ -263,7 +262,7 @@ public class HMSExternalCatalog extends ExternalCatalog {
LOG.debug("create database [{}]", dbName);
}
- ExternalDatabase<? extends ExternalTable> db = buildDbForInit(dbName,
null, dbId, logType, false);
+ ExternalDatabase<? extends ExternalTable> db = buildDbForInit(dbName,
null, dbId, logType, false);
if (useMetaCache.get()) {
if (isInitialized()) {
metaCache.updateCache(dbName, db,
Util.genIdByName(getQualifiedName(dbName)));
diff --git
a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreClientHelper.java
b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreClientHelper.java
index c53bc785546..f9f82c16d38 100644
---
a/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreClientHelper.java
+++
b/fe/fe-core/src/main/java/org/apache/doris/datasource/hive/HiveMetaStoreClientHelper.java
@@ -31,7 +31,6 @@ import org.apache.doris.analysis.NullLiteral;
import org.apache.doris.analysis.SlotRef;
import org.apache.doris.analysis.StringLiteral;
import org.apache.doris.catalog.ArrayType;
-import org.apache.doris.catalog.Env;
import org.apache.doris.catalog.MapType;
import org.apache.doris.catalog.PrimitiveType;
import org.apache.doris.catalog.ScalarType;
@@ -41,7 +40,6 @@ import org.apache.doris.catalog.Type;
import org.apache.doris.common.DdlException;
import org.apache.doris.common.security.authentication.AuthenticationConfig;
import org.apache.doris.common.security.authentication.HadoopAuthenticator;
-import org.apache.doris.datasource.ExternalCatalog;
import org.apache.doris.thrift.TExprOpcode;
import com.google.common.base.Strings;
@@ -821,10 +819,6 @@ public class HiveMetaStoreClientHelper {
return hudiSchema;
}
- public static <T> T ugiDoAs(long catalogId, PrivilegedExceptionAction<T>
action) {
- return ugiDoAs(((ExternalCatalog)
Env.getCurrentEnv().getCatalogMgr().getCatalog(catalogId)).getConfiguration(),
- action);
- }
public static <T> T ugiDoAs(Configuration conf,
PrivilegedExceptionAction<T> action) {
// if hive config is not ready, then use hadoop kerberos to login
diff --git
a/fe/fe-core/src/main/java/org/apache/doris/datasource/hudi/HudiUtils.java
b/fe/fe-core/src/main/java/org/apache/doris/datasource/hudi/HudiUtils.java
index 3dbbcf03da0..889ca4b5418 100644
--- a/fe/fe-core/src/main/java/org/apache/doris/datasource/hudi/HudiUtils.java
+++ b/fe/fe-core/src/main/java/org/apache/doris/datasource/hudi/HudiUtils.java
@@ -34,6 +34,7 @@ import org.apache.avro.LogicalType;
import org.apache.avro.LogicalTypes;
import org.apache.avro.Schema;
import org.apache.avro.Schema.Field;
+import org.apache.commons.lang3.exception.ExceptionUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.timeline.HoodieActiveTimeline;
@@ -264,22 +265,24 @@ public class HudiUtils {
return partitionValues;
}
String queryInstant =
tableSnapshot.get().getTime().replaceAll("[-: ]", "");
-
- partitionValues =
- HiveMetaStoreClientHelper.ugiDoAs(
-
HiveMetaStoreClientHelper.getConfiguration(hmsTable),
- () -> processor.getSnapshotPartitionValues(
- hmsTable, hudiClient, queryInstant,
useHiveSyncPartition));
+ try {
+ partitionValues =
hmsTable.getCatalog().getPreExecutionAuthenticator().execute(() ->
+ processor.getSnapshotPartitionValues(hmsTable,
hudiClient, queryInstant, useHiveSyncPartition));
+ } catch (Exception e) {
+ throw new
RuntimeException(ExceptionUtils.getRootCauseMessage(e), e);
+ }
} else {
HoodieTimeline timeline =
hudiClient.getCommitsAndCompactionTimeline().filterCompletedInstants();
Option<HoodieInstant> snapshotInstant = timeline.lastInstant();
if (!snapshotInstant.isPresent()) {
return partitionValues;
}
- partitionValues =
- HiveMetaStoreClientHelper.ugiDoAs(
-
HiveMetaStoreClientHelper.getConfiguration(hmsTable),
- () -> processor.getPartitionValues(hmsTable,
hudiClient, useHiveSyncPartition));
+ try {
+ partitionValues =
hmsTable.getCatalog().getPreExecutionAuthenticator().execute(()
+ -> processor.getPartitionValues(hmsTable, hudiClient,
useHiveSyncPartition));
+ } catch (Exception e) {
+ throw new
RuntimeException(ExceptionUtils.getRootCauseMessage(e), e);
+ }
}
return partitionValues;
}
diff --git
a/fe/fe-core/src/main/java/org/apache/doris/datasource/hudi/source/HudiScanNode.java
b/fe/fe-core/src/main/java/org/apache/doris/datasource/hudi/source/HudiScanNode.java
index 35c9905d6e8..5962cd7b26f 100644
---
a/fe/fe-core/src/main/java/org/apache/doris/datasource/hudi/source/HudiScanNode.java
+++
b/fe/fe-core/src/main/java/org/apache/doris/datasource/hudi/source/HudiScanNode.java
@@ -34,7 +34,6 @@ import org.apache.doris.datasource.ExternalTable;
import org.apache.doris.datasource.FileSplit;
import org.apache.doris.datasource.SchemaCacheValue;
import org.apache.doris.datasource.TableFormatType;
-import org.apache.doris.datasource.hive.HiveMetaStoreClientHelper;
import org.apache.doris.datasource.hive.HivePartition;
import org.apache.doris.datasource.hive.source.HiveScanNode;
import org.apache.doris.datasource.hudi.HudiSchemaCacheValue;
@@ -51,6 +50,7 @@ import org.apache.doris.thrift.TTableFormatFileDesc;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
+import org.apache.commons.lang3.exception.ExceptionUtils;
import org.apache.hudi.common.fs.FSUtils;
import org.apache.hudi.common.model.BaseFile;
import org.apache.hudi.common.model.FileSlice;
@@ -375,9 +375,12 @@ public class HudiScanNode extends HiveScanNode {
return getIncrementalSplits();
}
if (!partitionInit) {
- prunedPartitions = HiveMetaStoreClientHelper.ugiDoAs(
- HiveMetaStoreClientHelper.getConfiguration(hmsTable),
- () -> getPrunedPartitions(hudiClient));
+ try {
+ prunedPartitions =
hmsTable.getCatalog().getPreExecutionAuthenticator().execute(()
+ -> getPrunedPartitions(hudiClient));
+ } catch (Exception e) {
+ throw new UserException(ExceptionUtils.getRootCauseMessage(e),
e);
+ }
partitionInit = true;
}
List<Split> splits = Collections.synchronizedList(new ArrayList<>());
@@ -437,9 +440,12 @@ public class HudiScanNode extends HiveScanNode {
}
if (!partitionInit) {
// Non partition table will get one dummy partition
- prunedPartitions = HiveMetaStoreClientHelper.ugiDoAs(
- HiveMetaStoreClientHelper.getConfiguration(hmsTable),
- () -> getPrunedPartitions(hudiClient));
+ try {
+ prunedPartitions =
hmsTable.getCatalog().getPreExecutionAuthenticator().execute(()
+ -> getPrunedPartitions(hudiClient));
+ } catch (Exception e) {
+ throw new
RuntimeException(ExceptionUtils.getRootCauseMessage(e), e);
+ }
partitionInit = true;
}
int numPartitions =
ConnectContext.get().getSessionVariable().getNumPartitionsInBatchMode();
diff --git
a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataCache.java
b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataCache.java
index ad347ca78f2..e209fffa068 100644
---
a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataCache.java
+++
b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergMetadataCache.java
@@ -25,13 +25,13 @@ import org.apache.doris.datasource.CatalogIf;
import org.apache.doris.datasource.ExternalCatalog;
import org.apache.doris.datasource.ExternalMetaCacheMgr;
import org.apache.doris.datasource.hive.HMSExternalCatalog;
-import org.apache.doris.datasource.hive.HiveMetaStoreClientHelper;
import org.apache.doris.thrift.TIcebergMetadataParams;
import com.github.benmanes.caffeine.cache.LoadingCache;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
+import org.apache.commons.lang3.exception.ExceptionUtils;
import org.apache.iceberg.ManifestFiles;
import org.apache.iceberg.SerializableTable;
import org.apache.iceberg.Snapshot;
@@ -110,8 +110,13 @@ public class IcebergMetadataCache {
} else {
throw new RuntimeException("Only support 'hms' and 'iceberg' type
for iceberg table");
}
- return HiveMetaStoreClientHelper.ugiDoAs(((ExternalCatalog)
key.catalog).getConfiguration(),
- () -> ops.loadTable(key.dbName, key.tableName));
+ try {
+ return ((ExternalCatalog)
key.catalog).getPreExecutionAuthenticator().execute(()
+ -> ops.loadTable(key.dbName, key.tableName));
+ } catch (Exception e) {
+ throw new RuntimeException(ExceptionUtils.getRootCauseMessage(e),
e);
+ }
+
}
public void invalidateCatalogCache(long catalogId) {
diff --git
a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergUtils.java
b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergUtils.java
index 58519d92636..98eab7faba0 100644
---
a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergUtils.java
+++
b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/IcebergUtils.java
@@ -47,12 +47,12 @@ import org.apache.doris.common.UserException;
import org.apache.doris.common.info.SimpleTableInfo;
import org.apache.doris.common.util.TimeUtils;
import org.apache.doris.datasource.ExternalCatalog;
-import org.apache.doris.datasource.hive.HiveMetaStoreClientHelper;
import org.apache.doris.datasource.property.constants.HMSProperties;
import org.apache.doris.nereids.exceptions.NotSupportedException;
import org.apache.doris.thrift.TExprOpcode;
import com.google.common.collect.Lists;
+import org.apache.commons.lang3.exception.ExceptionUtils;
import org.apache.iceberg.CatalogProperties;
import org.apache.iceberg.FileFormat;
import org.apache.iceberg.PartitionSpec;
@@ -574,18 +574,23 @@ public class IcebergUtils {
* Get iceberg schema from catalog and convert them to doris schema
*/
public static List<Column> getSchema(ExternalCatalog catalog, String
dbName, String name) {
- return HiveMetaStoreClientHelper.ugiDoAs(catalog.getConfiguration(),
() -> {
- org.apache.iceberg.Table icebergTable = getIcebergTable(catalog,
dbName, name);
- Schema schema = icebergTable.schema();
- List<Types.NestedField> columns = schema.columns();
- List<Column> tmpSchema =
Lists.newArrayListWithCapacity(columns.size());
- for (Types.NestedField field : columns) {
- tmpSchema.add(new Column(field.name().toLowerCase(Locale.ROOT),
- IcebergUtils.icebergTypeToDorisType(field.type()),
true, null, true, field.doc(), true,
-
schema.caseInsensitiveFindField(field.name()).fieldId()));
- }
- return tmpSchema;
- });
+ try {
+ return catalog.getPreExecutionAuthenticator().execute(() -> {
+ org.apache.iceberg.Table icebergTable =
getIcebergTable(catalog, dbName, name);
+ Schema schema = icebergTable.schema();
+ List<Types.NestedField> columns = schema.columns();
+ List<Column> tmpSchema =
Lists.newArrayListWithCapacity(columns.size());
+ for (Types.NestedField field : columns) {
+ tmpSchema.add(new
Column(field.name().toLowerCase(Locale.ROOT),
+ IcebergUtils.icebergTypeToDorisType(field.type()),
true, null, true, field.doc(), true,
+
schema.caseInsensitiveFindField(field.name()).fieldId()));
+ }
+ return tmpSchema;
+ });
+ } catch (Exception e) {
+ throw new RuntimeException(ExceptionUtils.getRootCauseMessage(e),
e);
+ }
+
}
diff --git
a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/source/IcebergScanNode.java
b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/source/IcebergScanNode.java
index e43e8893332..29c07be8192 100644
---
a/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/source/IcebergScanNode.java
+++
b/fe/fe-core/src/main/java/org/apache/doris/datasource/iceberg/source/IcebergScanNode.java
@@ -31,7 +31,6 @@ import org.apache.doris.datasource.ExternalTable;
import org.apache.doris.datasource.FileQueryScanNode;
import org.apache.doris.datasource.TableFormatType;
import org.apache.doris.datasource.hive.HMSExternalTable;
-import org.apache.doris.datasource.hive.HiveMetaStoreClientHelper;
import org.apache.doris.datasource.iceberg.IcebergExternalCatalog;
import org.apache.doris.datasource.iceberg.IcebergExternalTable;
import org.apache.doris.datasource.iceberg.IcebergUtils;
@@ -51,6 +50,7 @@ import org.apache.doris.thrift.TTableFormatFileDesc;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
+import org.apache.commons.lang3.exception.ExceptionUtils;
import org.apache.iceberg.BaseTable;
import org.apache.iceberg.CombinedScanTask;
import org.apache.iceberg.DeleteFile;
@@ -183,8 +183,12 @@ public class IcebergScanNode extends FileQueryScanNode {
@Override
public List<Split> getSplits(int numBackends) throws UserException {
- return
HiveMetaStoreClientHelper.ugiDoAs(source.getCatalog().getConfiguration(),
- () -> doGetSplits(numBackends));
+ try {
+ return
source.getCatalog().getPreExecutionAuthenticator().execute(() ->
doGetSplits(numBackends));
+ } catch (Exception e) {
+ throw new RuntimeException(ExceptionUtils.getRootCauseMessage(e),
e);
+ }
+
}
private List<Split> doGetSplits(int numBackends) throws UserException {
@@ -250,7 +254,7 @@ public class IcebergScanNode extends FileQueryScanNode {
throw new NotSupportedException("Unable to read Iceberg table with
dropped old partition column.");
}
try (CloseableIterable<CombinedScanTask> combinedScanTasks =
- TableScanUtil.planTasks(fileScanTasks, realFileSplitSize, 1,
0)) {
+ TableScanUtil.planTasks(fileScanTasks, realFileSplitSize,
1, 0)) {
combinedScanTasks.forEach(taskGrp ->
taskGrp.files().forEach(splitTask -> {
if (isPartitionedTable) {
StructLike structLike = splitTask.file().partition();
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]