[ 
https://issues.apache.org/jira/browse/HIVE-26148?focusedWorklogId=764872&page=com.atlassian.jira.plugin.system.issuetabpanels:worklog-tabpanel#worklog-764872
 ]

ASF GitHub Bot logged work on HIVE-26148:
-----------------------------------------

                Author: ASF GitHub Bot
            Created on: 02/May/22 08:01
            Start Date: 02/May/22 08:01
    Worklog Time Spent: 10m 
      Work Description: pvary commented on code in PR #3218:
URL: https://github.com/apache/hive/pull/3218#discussion_r862658017


##########
ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeSynchronizer.java:
##########
@@ -199,57 +206,58 @@ public void run() {
             LOG.info("Not selected as leader, skip");
             continue;
           }
-          int numDc = 0, numDb = 0, numTbl = 0;
+          int numDc = 0, numCat = 0, numDb = 0, numTbl = 0;
           for (String dcName : hiveClient.getAllDataConnectorNames()) {
             numDc++;
-            HiveObjectRef dcToRefresh = 
getObjToRefresh(HiveObjectType.DATACONNECTOR, null, dcName);
+            HiveObjectRef dcToRefresh = 
getObjToRefresh(HiveObjectType.DATACONNECTOR, null, null, dcName);
             PrivilegeBag grantDataConnectorBag = new PrivilegeBag();
-            addGrantPrivilegesToBag(policyProvider, grantDataConnectorBag, 
HiveObjectType.DATACONNECTOR,
-                    null, dcName, null, authorizer);
+            addGrantPrivilegesToBag(policyProvider, grantDataConnectorBag, 
HiveObjectType.DATACONNECTOR, null, null,
+                dcName, null, authorizer);
             hiveClient.refresh_privileges(dcToRefresh, authorizer, 
grantDataConnectorBag);
             LOG.debug("processing data connector: " + dcName);
           }
           LOG.info("Success synchronize privilege " + 
policyProvider.getClass().getName() + ":" + numDc + " dataconnectors");
 
-          for (String dbName : hiveClient.getAllDatabases()) {
-            numDb++;
-            HiveObjectRef dbToRefresh = 
getObjToRefresh(HiveObjectType.DATABASE, dbName, null);
-            PrivilegeBag grantDatabaseBag = new PrivilegeBag();
-            addGrantPrivilegesToBag(policyProvider, grantDatabaseBag, 
HiveObjectType.DATABASE,
-                dbName, null, null, authorizer);
-            hiveClient.refresh_privileges(dbToRefresh, authorizer, 
grantDatabaseBag);
-            LOG.debug("processing " + dbName);
+          for (String catName: hiveClient.getCatalogs()) {
+            numCat++;
+            for (String dbName : hiveClient.getAllDatabases(catName)) {
+              numDb++;
+              HiveObjectRef dbToRefresh = 
getObjToRefresh(HiveObjectType.DATABASE, catName, dbName, null);
+              PrivilegeBag grantDatabaseBag = new PrivilegeBag();
+              addGrantPrivilegesToBag(policyProvider, grantDatabaseBag, 
HiveObjectType.DATABASE, catName, dbName, null, null,
+                  authorizer);
+              hiveClient.refresh_privileges(dbToRefresh, authorizer, 
grantDatabaseBag);
+              LOG.debug("processing " + dbName);
 
-            for (String tblName : hiveClient.getAllTables(dbName)) {
-              numTbl++;
-              LOG.debug("processing " + dbName + "." + tblName);
-              HiveObjectRef tableToRefresh = 
getObjToRefresh(HiveObjectType.TABLE, dbName, tblName);
-              PrivilegeBag grantTableBag = new PrivilegeBag();
-              addGrantPrivilegesToBag(policyProvider, grantTableBag, 
HiveObjectType.TABLE,
-                  dbName, tblName, null, authorizer);
-              hiveClient.refresh_privileges(tableToRefresh, authorizer, 
grantTableBag);
+              for (String tblName : hiveClient.getAllTables(catName, dbName)) {
+                numTbl++;
+                LOG.debug("processing " + dbName + "." + tblName);
+                HiveObjectRef tableToRefresh = 
getObjToRefresh(HiveObjectType.TABLE, catName, dbName, tblName);
+                PrivilegeBag grantTableBag = new PrivilegeBag();
+                addGrantPrivilegesToBag(policyProvider, grantTableBag, 
HiveObjectType.TABLE, catName, dbName, tblName, null,
+                    authorizer);
+                hiveClient.refresh_privileges(tableToRefresh, authorizer, 
grantTableBag);
 
-              HiveObjectRef tableOfColumnsToRefresh = 
getObjToRefresh(HiveObjectType.COLUMN, dbName, tblName);
-              PrivilegeBag grantColumnBag = new PrivilegeBag();
-              Table tbl = null;
-              try {
-                tbl = hiveClient.getTable(dbName, tblName);
-                for (FieldSchema fs : tbl.getPartitionKeys()) {
-                  addGrantPrivilegesToBag(policyProvider, grantColumnBag, 
HiveObjectType.COLUMN,
-                          dbName, tblName, fs.getName(), authorizer);
-                }
-                for (FieldSchema fs : tbl.getSd().getCols()) {
-                  addGrantPrivilegesToBag(policyProvider, grantColumnBag, 
HiveObjectType.COLUMN,
-                          dbName, tblName, fs.getName(), authorizer);
+                HiveObjectRef tableOfColumnsToRefresh = 
getObjToRefresh(HiveObjectType.COLUMN, catName, dbName, tblName);
+                PrivilegeBag grantColumnBag = new PrivilegeBag();
+                Table tbl = null;
+                try {
+                  tbl = hiveClient.getTable(catName, dbName, tblName);
+                  for (FieldSchema fs : tbl.getPartitionKeys()) {
+                    addGrantPrivilegesToBag(policyProvider, grantColumnBag, 
HiveObjectType.COLUMN, catName, dbName, tblName, fs.getName(), authorizer);
+                  }
+                  for (FieldSchema fs : tbl.getSd().getCols()) {
+                    addGrantPrivilegesToBag(policyProvider, grantColumnBag, 
HiveObjectType.COLUMN, catName, dbName, tblName, fs.getName(), authorizer);
+                  }
+                  hiveClient.refresh_privileges(tableOfColumnsToRefresh, 
authorizer, grantColumnBag);
+                } catch (MetaException e) {
+                  LOG.debug("Unable to synchronize " + tblName + ":" + 
e.getMessage());
                 }
-                hiveClient.refresh_privileges(tableOfColumnsToRefresh, 
authorizer, grantColumnBag);
-              } catch (MetaException e) {
-                LOG.debug("Unable to synchronize " + tblName + ":" + 
e.getMessage());
               }
             }
           }
-          LOG.info("Success synchronize privilege " + 
policyProvider.getClass().getName() + ":" + numDb + " databases, "
-                  + numTbl + " tables");
+          LOG.info(String.format("Success synchronize privilege %s : %d 
catalogs, %d databases, %d tables.",

Review Comment:
   Maybe just keep as the other part with concatenation so at least it is 
consistent inside the file?



##########
ql/src/java/org/apache/hadoop/hive/ql/security/authorization/PrivilegeSynchronizer.java:
##########
@@ -199,57 +206,58 @@ public void run() {
             LOG.info("Not selected as leader, skip");
             continue;
           }
-          int numDc = 0, numDb = 0, numTbl = 0;
+          int numDc = 0, numCat = 0, numDb = 0, numTbl = 0;
           for (String dcName : hiveClient.getAllDataConnectorNames()) {
             numDc++;
-            HiveObjectRef dcToRefresh = 
getObjToRefresh(HiveObjectType.DATACONNECTOR, null, dcName);
+            HiveObjectRef dcToRefresh = 
getObjToRefresh(HiveObjectType.DATACONNECTOR, null, null, dcName);
             PrivilegeBag grantDataConnectorBag = new PrivilegeBag();
-            addGrantPrivilegesToBag(policyProvider, grantDataConnectorBag, 
HiveObjectType.DATACONNECTOR,
-                    null, dcName, null, authorizer);
+            addGrantPrivilegesToBag(policyProvider, grantDataConnectorBag, 
HiveObjectType.DATACONNECTOR, null, null,
+                dcName, null, authorizer);
             hiveClient.refresh_privileges(dcToRefresh, authorizer, 
grantDataConnectorBag);
             LOG.debug("processing data connector: " + dcName);
           }
           LOG.info("Success synchronize privilege " + 
policyProvider.getClass().getName() + ":" + numDc + " dataconnectors");
 
-          for (String dbName : hiveClient.getAllDatabases()) {
-            numDb++;
-            HiveObjectRef dbToRefresh = 
getObjToRefresh(HiveObjectType.DATABASE, dbName, null);
-            PrivilegeBag grantDatabaseBag = new PrivilegeBag();
-            addGrantPrivilegesToBag(policyProvider, grantDatabaseBag, 
HiveObjectType.DATABASE,
-                dbName, null, null, authorizer);
-            hiveClient.refresh_privileges(dbToRefresh, authorizer, 
grantDatabaseBag);
-            LOG.debug("processing " + dbName);
+          for (String catName: hiveClient.getCatalogs()) {
+            numCat++;
+            for (String dbName : hiveClient.getAllDatabases(catName)) {
+              numDb++;
+              HiveObjectRef dbToRefresh = 
getObjToRefresh(HiveObjectType.DATABASE, catName, dbName, null);
+              PrivilegeBag grantDatabaseBag = new PrivilegeBag();
+              addGrantPrivilegesToBag(policyProvider, grantDatabaseBag, 
HiveObjectType.DATABASE, catName, dbName, null, null,
+                  authorizer);
+              hiveClient.refresh_privileges(dbToRefresh, authorizer, 
grantDatabaseBag);
+              LOG.debug("processing " + dbName);
 
-            for (String tblName : hiveClient.getAllTables(dbName)) {
-              numTbl++;
-              LOG.debug("processing " + dbName + "." + tblName);
-              HiveObjectRef tableToRefresh = 
getObjToRefresh(HiveObjectType.TABLE, dbName, tblName);
-              PrivilegeBag grantTableBag = new PrivilegeBag();
-              addGrantPrivilegesToBag(policyProvider, grantTableBag, 
HiveObjectType.TABLE,
-                  dbName, tblName, null, authorizer);
-              hiveClient.refresh_privileges(tableToRefresh, authorizer, 
grantTableBag);
+              for (String tblName : hiveClient.getAllTables(catName, dbName)) {
+                numTbl++;
+                LOG.debug("processing " + dbName + "." + tblName);
+                HiveObjectRef tableToRefresh = 
getObjToRefresh(HiveObjectType.TABLE, catName, dbName, tblName);
+                PrivilegeBag grantTableBag = new PrivilegeBag();
+                addGrantPrivilegesToBag(policyProvider, grantTableBag, 
HiveObjectType.TABLE, catName, dbName, tblName, null,
+                    authorizer);
+                hiveClient.refresh_privileges(tableToRefresh, authorizer, 
grantTableBag);
 
-              HiveObjectRef tableOfColumnsToRefresh = 
getObjToRefresh(HiveObjectType.COLUMN, dbName, tblName);
-              PrivilegeBag grantColumnBag = new PrivilegeBag();
-              Table tbl = null;
-              try {
-                tbl = hiveClient.getTable(dbName, tblName);
-                for (FieldSchema fs : tbl.getPartitionKeys()) {
-                  addGrantPrivilegesToBag(policyProvider, grantColumnBag, 
HiveObjectType.COLUMN,
-                          dbName, tblName, fs.getName(), authorizer);
-                }
-                for (FieldSchema fs : tbl.getSd().getCols()) {
-                  addGrantPrivilegesToBag(policyProvider, grantColumnBag, 
HiveObjectType.COLUMN,
-                          dbName, tblName, fs.getName(), authorizer);
+                HiveObjectRef tableOfColumnsToRefresh = 
getObjToRefresh(HiveObjectType.COLUMN, catName, dbName, tblName);
+                PrivilegeBag grantColumnBag = new PrivilegeBag();
+                Table tbl = null;
+                try {
+                  tbl = hiveClient.getTable(catName, dbName, tblName);
+                  for (FieldSchema fs : tbl.getPartitionKeys()) {
+                    addGrantPrivilegesToBag(policyProvider, grantColumnBag, 
HiveObjectType.COLUMN, catName, dbName, tblName, fs.getName(), authorizer);
+                  }
+                  for (FieldSchema fs : tbl.getSd().getCols()) {
+                    addGrantPrivilegesToBag(policyProvider, grantColumnBag, 
HiveObjectType.COLUMN, catName, dbName, tblName, fs.getName(), authorizer);
+                  }
+                  hiveClient.refresh_privileges(tableOfColumnsToRefresh, 
authorizer, grantColumnBag);
+                } catch (MetaException e) {
+                  LOG.debug("Unable to synchronize " + tblName + ":" + 
e.getMessage());
                 }
-                hiveClient.refresh_privileges(tableOfColumnsToRefresh, 
authorizer, grantColumnBag);
-              } catch (MetaException e) {
-                LOG.debug("Unable to synchronize " + tblName + ":" + 
e.getMessage());
               }
             }
           }
-          LOG.info("Success synchronize privilege " + 
policyProvider.getClass().getName() + ":" + numDb + " databases, "
-                  + numTbl + " tables");
+          LOG.info(String.format("Success synchronize privilege %s : %d 
catalogs, %d databases, %d tables.",

Review Comment:
   Maybe just keep as the other part with concatenation so at least it is 
consistent inside the file?





Issue Time Tracking
-------------------

    Worklog Id:     (was: 764872)
    Time Spent: 2h 20m  (was: 2h 10m)

> Keep MetaStoreFilterHook interface compatibility after introducing catalogs
> ---------------------------------------------------------------------------
>
>                 Key: HIVE-26148
>                 URL: https://issues.apache.org/jira/browse/HIVE-26148
>             Project: Hive
>          Issue Type: Improvement
>          Components: Hive
>    Affects Versions: 3.0.0
>            Reporter: Wechar
>            Assignee: Wechar
>            Priority: Minor
>              Labels: pull-request-available
>             Fix For: 4.0.0-alpha-1
>
>          Time Spent: 2h 20m
>  Remaining Estimate: 0h
>
> Hive 3.0 introduced the catalog concept; when we upgraded the Hive dependency 
> version from 2.3 to 3.x, we found that some interfaces of *MetaStoreFilterHook* 
> are not compatible:
> {code:bash}
>  git show ba8a99e115 -- 
> standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreFilterHook.java
> {code}
> {code:bash}
> --- 
> a/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreFilterHook.java
> +++ 
> b/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/MetaStoreFilterHook.java
>    /**
>     * Filter given list of tables
> -   * @param dbName
> -   * @param tableList
> +   * @param catName catalog name
> +   * @param dbName database name
> +   * @param tableList list of table returned by the metastore
>     * @return List of filtered table names
>     */
> -  public List<String> filterTableNames(String dbName, List<String> 
> tableList) throws MetaException;
> +  List<String> filterTableNames(String catName, String dbName, List<String> 
> tableList)
> +      throws MetaException;
> {code}
> We can retain the previous interfaces and use the default catalog to 
> implement them.



--
This message was sent by Atlassian Jira
(v8.20.7#820007)

Reply via email to