This is an automated email from the ASF dual-hosted git repository.
lzljs3620320 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/paimon.git
The following commit(s) were added to refs/heads/master by this push:
new 9a334c80d [hive] Sync db owner prop to hms when creating the db (#4544)
9a334c80d is described below
commit 9a334c80d6fc1e4c0bfc13561a25eda674990d29
Author: Zouxxyy <[email protected]>
AuthorDate: Tue Nov 19 08:53:19 2024 +0800
[hive] Sync db owner prop to hms when creating the db (#4544)
---
.../java/org/apache/paimon/catalog/Catalog.java | 6 +++--
.../java/org/apache/paimon/hive/HiveCatalog.java | 11 ++++++---
.../spark/sql/DDLWithHiveCatalogTestBase.scala | 27 +++++++++-------------
3 files changed, 23 insertions(+), 21 deletions(-)
diff --git a/paimon-core/src/main/java/org/apache/paimon/catalog/Catalog.java b/paimon-core/src/main/java/org/apache/paimon/catalog/Catalog.java
index 6a6a047bd..d919c5978 100644
--- a/paimon-core/src/main/java/org/apache/paimon/catalog/Catalog.java
+++ b/paimon-core/src/main/java/org/apache/paimon/catalog/Catalog.java
@@ -52,10 +52,12 @@ public interface Catalog extends AutoCloseable {
String SYSTEM_TABLE_SPLITTER = "$";
String SYSTEM_DATABASE_NAME = "sys";
String SYSTEM_BRANCH_PREFIX = "branch_";
- String COMMENT_PROP = "comment";
String TABLE_DEFAULT_OPTION_PREFIX = "table-default.";
- String DB_LOCATION_PROP = "location";
String DB_SUFFIX = ".db";
+
+ String COMMENT_PROP = "comment";
+ String OWNER_PROP = "owner";
+ String DB_LOCATION_PROP = "location";
String NUM_ROWS_PROP = "numRows";
String NUM_FILES_PROP = "numFiles";
String TOTAL_SIZE_PROP = "totalSize";
diff --git a/paimon-hive/paimon-hive-catalog/src/main/java/org/apache/paimon/hive/HiveCatalog.java b/paimon-hive/paimon-hive-catalog/src/main/java/org/apache/paimon/hive/HiveCatalog.java
index 8b8b62934..eed0fdb9b 100644
--- a/paimon-hive/paimon-hive-catalog/src/main/java/org/apache/paimon/hive/HiveCatalog.java
+++ b/paimon-hive/paimon-hive-catalog/src/main/java/org/apache/paimon/hive/HiveCatalog.java
@@ -283,6 +283,8 @@ public class HiveCatalog extends AbstractCatalog {
(key, value) -> {
if (key.equals(COMMENT_PROP)) {
database.setDescription(value);
+ } else if (key.equals(OWNER_PROP)) {
+ database.setOwnerName(value);
} else if (key.equals(DB_LOCATION_PROP)) {
database.setLocationUri(value);
} else if (value != null) {
@@ -299,12 +301,15 @@ public class HiveCatalog extends AbstractCatalog {
try {
Database database = clients.run(client -> client.getDatabase(name));
Map<String, String> options = new HashMap<>(database.getParameters());
- if (database.getLocationUri() != null) {
- options.put(DB_LOCATION_PROP, database.getLocationUri());
- }
if (database.getDescription() != null) {
options.put(COMMENT_PROP, database.getDescription());
}
+ if (database.getOwnerName() != null) {
+ options.put(OWNER_PROP, database.getOwnerName());
+ }
+ if (database.getLocationUri() != null) {
+ options.put(DB_LOCATION_PROP, database.getLocationUri());
+ }
return org.apache.paimon.catalog.Database.of(name, options,
database.getDescription());
} catch (NoSuchObjectException e) {
throw new DatabaseNotExistException(name);
diff --git a/paimon-spark/paimon-spark-common/src/test/scala/org/apache/paimon/spark/sql/DDLWithHiveCatalogTestBase.scala b/paimon-spark/paimon-spark-common/src/test/scala/org/apache/paimon/spark/sql/DDLWithHiveCatalogTestBase.scala
index 7478f9628..33b993160 100644
--- a/paimon-spark/paimon-spark-common/src/test/scala/org/apache/paimon/spark/sql/DDLWithHiveCatalogTestBase.scala
+++ b/paimon-spark/paimon-spark-common/src/test/scala/org/apache/paimon/spark/sql/DDLWithHiveCatalogTestBase.scala
@@ -189,6 +189,7 @@ abstract class DDLWithHiveCatalogTestBase extends PaimonHiveTestBase {
val props = getDatabaseProps("paimon_db")
Assertions.assertEquals(props("k1"), "v1")
Assertions.assertEquals(props("k2"), "v2")
+ Assertions.assertTrue(getDatabaseOwner("paimon_db").nonEmpty)
}
}
}
@@ -296,29 +297,23 @@ abstract class DDLWithHiveCatalogTestBase extends PaimonHiveTestBase {
}
}
- def getDatabaseLocation(dbName: String): String = {
+ def getDatabaseProp(dbName: String, propertyName: String): String = {
spark
- .sql(s"DESC DATABASE $dbName")
- .filter("info_name == 'Location'")
+ .sql(s"DESC DATABASE EXTENDED $dbName")
+ .filter(s"info_name == '$propertyName'")
.head()
.getAs[String]("info_value")
- .split(":")(1)
}
- def getDatabaseComment(dbName: String): String = {
- spark
- .sql(s"DESC DATABASE $dbName")
- .filter("info_name == 'Comment'")
- .head()
- .getAs[String]("info_value")
- }
+ def getDatabaseLocation(dbName: String): String =
+ getDatabaseProp(dbName, "Location").split(":")(1)
+
+  def getDatabaseComment(dbName: String): String = getDatabaseProp(dbName, "Comment")
+
+  def getDatabaseOwner(dbName: String): String = getDatabaseProp(dbName, "Owner")
def getDatabaseProps(dbName: String): Map[String, String] = {
- val dbPropsStr = spark
- .sql(s"DESC DATABASE EXTENDED $dbName")
- .filter("info_name == 'Properties'")
- .head()
- .getAs[String]("info_value")
+ val dbPropsStr = getDatabaseProp(dbName, "Properties")
val pattern = "\\(([^,]+),([^)]+)\\)".r
pattern
.findAllIn(dbPropsStr.drop(1).dropRight(1))