This is an automated email from the ASF dual-hosted git repository.
simhadrig pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/hive.git
The following commit(s) were added to refs/heads/master by this push:
new d476a993294 HIVE-28934: PlanUtils.configureJobPropertiesForStorageHandler should use hive session configuration. (#5788)
d476a993294 is described below
commit d476a993294fcac7396b7666eaab9698be485955
Author: Vlad Rozov <[email protected]>
AuthorDate: Tue May 6 08:05:01 2025 -0700
    HIVE-28934: PlanUtils.configureJobPropertiesForStorageHandler should use hive session configuration. (#5788)
---
.../apache/iceberg/mr/hive/HiveIcebergStorageHandler.java | 13 ++++++++++---
ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java | 10 ++++------
2 files changed, 14 insertions(+), 9 deletions(-)
diff --git a/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergStorageHandler.java b/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergStorageHandler.java
index 82b9d9084e8..dd73133876b 100644
--- a/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergStorageHandler.java
+++ b/iceberg/iceberg-handler/src/main/java/org/apache/iceberg/mr/hive/HiveIcebergStorageHandler.java
@@ -92,6 +92,7 @@
import org.apache.hadoop.hive.ql.io.sarg.ConvertAstToSearchArg;
import org.apache.hadoop.hive.ql.io.sarg.SearchArgument;
import org.apache.hadoop.hive.ql.metadata.DummyPartition;
+import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
import org.apache.hadoop.hive.ql.metadata.HiveStoragePredicateHandler;
@@ -1804,13 +1805,19 @@ private String collectColumnAndReplaceDummyValues(ExprNodeDesc node, String foun
* </ul>
* @param tableProps table properties, must be not null
*/
-  private void fallbackToNonVectorizedModeBasedOnProperties(Properties tableProps) {
+  private static void fallbackToNonVectorizedModeBasedOnProperties(Properties tableProps) {
     Schema tableSchema = SchemaParser.fromJson(tableProps.getProperty(InputFormatConfig.TABLE_SCHEMA));
+    String tableMeta = tableProps.getProperty(IcebergAcidUtil.META_TABLE_PROPERTY);
+    boolean isMetaTable = tableMeta != null && IcebergMetadataTables.isValidMetaTable(tableMeta);
     if (FileFormat.AVRO.name().equalsIgnoreCase(tableProps.getProperty(TableProperties.DEFAULT_FILE_FORMAT)) ||
-        isValidMetadataTable(tableProps.getProperty(IcebergAcidUtil.META_TABLE_PROPERTY)) ||
+        isMetaTable ||
         hasOrcTimeInSchema(tableProps, tableSchema) ||
         !hasParquetNestedTypeWithinListOrMap(tableProps, tableSchema)) {
-      conf.setBoolean(ConfVars.HIVE_VECTORIZATION_ENABLED.varname, false);
+      try {
+        Hive.get(false).getConf().setBoolVar(ConfVars.HIVE_VECTORIZATION_ENABLED, false);
+      } catch (HiveException e) {
+        throw new RuntimeException(e);
+      }
}
}
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java b/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
index ca98cbd6635..375ae47420a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/plan/PlanUtils.java
@@ -20,6 +20,7 @@
 import static org.apache.commons.lang3.StringUtils.isNotBlank;
 import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_LOCATION;
+import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE;
 import static org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.TABLE_IS_CTAS;
 import static org.apache.hive.common.util.HiveStringUtils.quoteComments;
@@ -57,7 +58,6 @@
import org.apache.hadoop.hive.ql.io.IgnoreKeyTextOutputFormat;
import org.apache.hadoop.hive.ql.io.RCFileInputFormat;
import org.apache.hadoop.hive.ql.io.RCFileOutputFormat;
-import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.ql.metadata.HiveStorageHandler;
import org.apache.hadoop.hive.ql.metadata.HiveUtils;
@@ -932,11 +932,9 @@ private static void configureJobPropertiesForStorageHandler(boolean input,
     }
     try {
-      HiveStorageHandler storageHandler =
-          HiveUtils.getStorageHandler(
-              Hive.get().getConf(),
-              tableDesc.getProperties().getProperty(
-                  org.apache.hadoop.hive.metastore.api.hive_metastoreConstants.META_TABLE_STORAGE));
+      HiveConf hiveConf = SessionState.getSessionConf();
+      String className = tableDesc.getProperties().getProperty(META_TABLE_STORAGE);
+      HiveStorageHandler storageHandler = HiveUtils.getStorageHandler(hiveConf, className);
       if (storageHandler != null) {
         Map<String, String> jobProperties = new LinkedHashMap<String, String>();
         Map<String, String> jobSecrets = new LinkedHashMap<String, String>();