This is an automated email from the ASF dual-hosted git repository.
liuxun pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/gravitino.git
The following commit(s) were added to refs/heads/main by this push:
new eb8fa67eb [#5846] build(dev): Hive image supports for JDBC SQL
standard authorization (#5849)
eb8fa67eb is described below
commit eb8fa67eb7d45b46d47ccdca86832d0823bee3f7
Author: roryqi <[email protected]>
AuthorDate: Tue Dec 17 11:37:13 2024 +0800
[#5846] build(dev): Hive image supports for JDBC SQL standard authorization
(#5849)
### What changes were proposed in this pull request?
Add support for JDBC SQL standard authorization
### Why are the changes needed?
Fix: #5846
### Does this PR introduce _any_ user-facing change?
No.
### How was this patch tested?
I tested locally.
---
dev/docker/hive/Dockerfile | 2 +
dev/docker/hive/hive-site-for-sql-base-auth.xml | 101 +++++++++++++++++++++
.../hive/hiveserver2-site-for-sql-base-auth.xml | 35 +++++++
dev/docker/hive/start.sh | 10 ++
docs/docker-image-details.md | 5 +
5 files changed, 153 insertions(+)
diff --git a/dev/docker/hive/Dockerfile b/dev/docker/hive/Dockerfile
index cd79e2562..4aa5aac81 100644
--- a/dev/docker/hive/Dockerfile
+++ b/dev/docker/hive/Dockerfile
@@ -171,6 +171,8 @@ RUN ln -s /opt/apache-hive-${HIVE3_VERSION}-bin
${HIVE3_HOME}
# Add hive configuration to temporary directory
ADD hive-site.xml ${HIVE_TMP_CONF_DIR}/hive-site.xml
+ADD hive-site-for-sql-base-auth.xml
${HIVE_TMP_CONF_DIR}/hive-site-for-sql-base-auth.xml
+ADD hiveserver2-site-for-sql-base-auth.xml
${HIVE_TMP_CONF_DIR}/hiveserver2-site-for-sql-base-auth.xml
################################################################################
# add mysql jdbc driver
diff --git a/dev/docker/hive/hive-site-for-sql-base-auth.xml
b/dev/docker/hive/hive-site-for-sql-base-auth.xml
new file mode 100644
index 000000000..1f5da73fe
--- /dev/null
+++ b/dev/docker/hive/hive-site-for-sql-base-auth.xml
@@ -0,0 +1,101 @@
+<configuration>
+ <property>
+ <name>hive.server2.enable.doAs</name>
+ <value>false</value>
+ <description>Disable user impersonation for HiveServer2</description>
+ </property>
+
+ <property>
+ <name>hive.users.in.admin.role</name>
+ <value>hive</value>
+ </property>
+
+ <property>
+ <name>hive.security.authorization.manager</name>
+
<value>org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdConfOnlyAuthorizerFactory</value>
+ </property>
+
+ <property>
+ <name>hive.security.metastore.authorization.manager</name>
+
<value>org.apache.hadoop.hive.ql.security.authorization.MetaStoreAuthzAPIAuthorizerEmbedOnly</value>
+ </property>
+
+ <property>
+ <name>hive.exec.scratchdir</name>
+ <value>/tmp</value>
+ <description>Scratch space for Hive jobs</description>
+ </property>
+
+ <property>
+ <name>mapred.child.java.opts</name>
+ <value>-Xmx4G -XX:+UseConcMarkSweepGC</value>
+ <description>Max memory for Map Reduce Jobs</description>
+ </property>
+
+ <property>
+ <name>javax.jdo.option.ConnectionURL</name>
+
<value>jdbc:mysql://localhost/metastore_db?createDatabaseIfNotExist=true&amp;useSSL=false</value>
+ </property>
+
+ <property>
+ <name>javax.jdo.option.ConnectionUserName</name>
+ <value>hive</value>
+ </property>
+
+ <property>
+ <name>javax.jdo.option.ConnectionPassword</name>
+ <value>hive</value>
+ </property>
+
+ <property>
+ <name>javax.jdo.option.ConnectionDriverName</name>
+ <value>com.mysql.jdbc.Driver</value>
+ </property>
+
+ <property>
+ <name>hive.metastore.warehouse.dir</name>
+ <value>hdfs://__REPLACE__HOST_NAME:9000/user/hive/warehouse</value>
+ <description>location of default database for the warehouse</description>
+ </property>
+
+ <property>
+ <name>fs.s3a.access.key</name>
+ <value>S3_ACCESS_KEY_ID</value>
+ </property>
+
+ <property>
+ <name>fs.s3a.secret.key</name>
+ <value>S3_SECRET_KEY_ID</value>
+ </property>
+
+ <property>
+ <name>fs.s3a.endpoint</name>
+ <value>S3_ENDPOINT_ID</value>
+ </property>
+
+ <property>
+ <name>fs.s3a.aws.credentials.provider</name>
+
<value>org.apache.hadoop.fs.s3a.SimpleAWSCredentialsProvider,com.amazonaws.auth.EnvironmentVariableCredentialsProvider,org.apache.hadoop.fs.s3a.AnonymousAWSCredentialsProvider</value>
+ </property>
+
+ <property>
+ <name>fs.abfss.impl</name>
+ <value>org.apache.hadoop.fs.azurebfs.SecureAzureBlobFileSystem</value>
+ </property>
+
+ <property>
+ <name>fs.azure.account.key.ABS_ACCOUNT_NAME.dfs.core.windows.net</name>
+ <value>ABS_ACCOUNT_KEY</value>
+ </property>
+
+ <property>
+ <name>fs.gs.auth.service.account.enable</name>
+ <value>true</value>
+ </property>
+
+ <property>
+ <name>fs.gs.auth.service.account.json.keyfile</name>
+ <value>SERVICE_ACCOUNT_FILE</value>
+ </property>
+
+</configuration>
diff --git a/dev/docker/hive/hiveserver2-site-for-sql-base-auth.xml
b/dev/docker/hive/hiveserver2-site-for-sql-base-auth.xml
new file mode 100644
index 000000000..eddf3ac49
--- /dev/null
+++ b/dev/docker/hive/hiveserver2-site-for-sql-base-auth.xml
@@ -0,0 +1,35 @@
+<?xml version="1.0" encoding="UTF-8" standalone="no"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?><!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements. See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License. You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+<configuration>
+ <property>
+ <name>hive.security.authorization.enabled</name>
+ <value>true</value>
+ </property>
+ <property>
+ <name>hive.security.authorization.manager</name>
+
<value>org.apache.hadoop.hive.ql.security.authorization.plugin.sqlstd.SQLStdHiveAuthorizerFactory</value>
+ </property>
+ <property>
+ <name>hive.security.authenticator.manager</name>
+
<value>org.apache.hadoop.hive.ql.security.SessionStateUserAuthenticator</value>
+ </property>
+ <property>
+ <name>hive.conf.restricted.list</name>
+
<value>hive.security.authorization.enabled,hive.security.authorization.manager,hive.security.authenticator.manager</value>
+ </property>
+</configuration>
\ No newline at end of file
diff --git a/dev/docker/hive/start.sh b/dev/docker/hive/start.sh
index 93ab35e30..60bb90fee 100644
--- a/dev/docker/hive/start.sh
+++ b/dev/docker/hive/start.sh
@@ -39,6 +39,16 @@ cp -f ${HADOOP_TMP_CONF_DIR}/* ${HADOOP_CONF_DIR}
cp -f ${HIVE_TMP_CONF_DIR}/* ${HIVE_CONF_DIR}
sed -i "s/__REPLACE__HOST_NAME/$(hostname)/g" ${HADOOP_CONF_DIR}/core-site.xml
sed -i "s/__REPLACE__HOST_NAME/$(hostname)/g" ${HADOOP_CONF_DIR}/hdfs-site.xml
+
+if [[ -n "${ENABLE_JDBC_AUTHORIZATION}" ]]; then
+ if [[ -n "${RANGER_HIVE_REPOSITORY_NAME}" && -n "${RANGER_SERVER_URL}" ]];
then
+ echo "You can't set ENABLE_JDBC_AUTHORIZATION and
RANGER_HIVE_REPOSITORY_NAME at the same time."
+ exit -1
+ fi
+ cp -f ${HIVE_CONF_DIR}/hive-site-for-sql-base-auth.xml
${HIVE_CONF_DIR}/hive-site.xml
+ cp -f ${HIVE_CONF_DIR}/hiveserver2-site-for-sql-base-auth.xml
${HIVE_CONF_DIR}/hiveserver2-site.xml
+fi
+
sed -i "s/__REPLACE__HOST_NAME/$(hostname)/g" ${HIVE_CONF_DIR}/hive-site.xml
# whether S3 is set
diff --git a/docs/docker-image-details.md b/docs/docker-image-details.md
index 4e0a81093..c723c009d 100644
--- a/docs/docker-image-details.md
+++ b/docs/docker-image-details.md
@@ -168,6 +168,11 @@ Changelog
You can use this kind of image to test the catalog of Apache Hive.
Changelog
+
+- apache/gravitino-ci:hive-0.1.17
+ - Add support for JDBC SQL standard authorization
+ - Add JDBC SQL standard authorization related configuration in the
`hive-site-for-sql-base-auth.xml` and `hiveserver2-site-for-sql-base-auth.xml`
+-
- apache/gravitino-ci:hive-0.1.16
- Add GCS related configuration in the `hive-site.xml` file.
- Add GCS bundle jar in the `${HADOOP_HOME}/share/hadoop/common/lib/`