Repository: sentry
Updated Branches:
  refs/heads/master 5a7b07643 -> b87651cf9


SENTRY-2034: Add e2e tests for testing HMS notification processing. (Kalyan Kumar Kalvagadda, reviewed by Sergio Pena, Lina Li)


Project: http://git-wip-us.apache.org/repos/asf/sentry/repo
Commit: http://git-wip-us.apache.org/repos/asf/sentry/commit/b87651cf
Tree: http://git-wip-us.apache.org/repos/asf/sentry/tree/b87651cf
Diff: http://git-wip-us.apache.org/repos/asf/sentry/diff/b87651cf

Branch: refs/heads/master
Commit: b87651cf9b140f03d44297129fbe68b82052da8e
Parents: 5a7b076
Author: Kalyan Kumar Kalvagadda <kkal...@cloudera.com>
Authored: Tue Jan 16 14:08:54 2018 -0600
Committer: Kalyan Kumar Kalvagadda <kkal...@cloudera.com>
Committed: Tue Jan 16 14:48:55 2018 -0600

----------------------------------------------------------------------
 .../TestDbPrivilegeCleanupOnDrop.java           |  40 ++------
 .../TestHmsNotificationProcessing.java          |  92 +++++++++++++++++
 .../TestHmsNotificationProcessingBase.java      |  79 +++++++++++++++
 ...msNotificationProcessingWithOutHdfsSync.java | 101 +++++++++++++++++++
 ...tificationProcessingWithOutSyncOnCreate.java |  96 ++++++++++++++++++
 ...NotificationProcessingWithOutSyncOnDrop.java |  92 +++++++++++++++++
 .../e2e/hdfs/TestHDFSIntegrationAdvanced.java   |   6 ++
 .../tests/e2e/hdfs/TestHDFSIntegrationBase.java | 100 +++++++++++++-----
 .../e2e/hdfs/TestHDFSIntegrationEnd2End.java    |   7 +-
 .../e2e/hdfs/TestHDFSIntegrationWithHA.java     |   1 +
 10 files changed, 551 insertions(+), 63 deletions(-)
----------------------------------------------------------------------
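
Each new test class follows the same setup pattern: it sets the static sync flags on
the base class before delegating to TestHDFSIntegrationBase.setup(), which translates
the flags into Sentry service properties. A minimal sketch of that pattern is shown
below; the class name and the particular flag combination are illustrative only, not
part of this commit.

    package org.apache.sentry.tests.e2e.dbprovider;

    import org.apache.sentry.tests.e2e.hdfs.TestHDFSIntegrationBase;
    import org.junit.BeforeClass;

    // Hypothetical example variant, for illustration only.
    public class TestHmsNotificationProcessingExample extends TestHmsNotificationProcessingBase {

      @BeforeClass
      public static void setup() throws Exception {
        // Pick the behavior under test before the base class starts the servers.
        hdfsSyncEnabled = true;    // propagate Sentry ACLs to HDFS
        hiveSyncOnCreate = true;   // drop stale privileges when a Hive object is created
        hiveSyncOnDrop = true;     // drop privileges when a Hive object is dropped
        TestHDFSIntegrationBase.setup();
      }
    }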


http://git-wip-us.apache.org/repos/asf/sentry/blob/b87651cf/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbPrivilegeCleanupOnDrop.java
----------------------------------------------------------------------
diff --git 
a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbPrivilegeCleanupOnDrop.java
 
b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbPrivilegeCleanupOnDrop.java
index 2138304..5fe6625 100644
--- 
a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbPrivilegeCleanupOnDrop.java
+++ 
b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbPrivilegeCleanupOnDrop.java
@@ -18,7 +18,6 @@
 package org.apache.sentry.tests.e2e.dbprovider;
 
 import static org.hamcrest.Matchers.is;
-import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertThat;
 import static org.junit.Assert.assertTrue;
 
@@ -26,7 +25,6 @@ import java.sql.Connection;
 import java.sql.ResultSet;
 import java.sql.SQLException;
 import java.sql.Statement;
-import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.sentry.tests.e2e.hdfs.TestHDFSIntegrationBase;
@@ -59,8 +57,6 @@ public class TestDbPrivilegeCleanupOnDrop extends 
TestHDFSIntegrationBase {
           DB1 = "db_1",
           DB2 = "db_2";
 
-  static final long WAIT_FOR_NOTIFICATION_PROCESSING = 5000;
-
   private Connection connection;
   private Statement statement;
 
@@ -326,13 +322,13 @@ public class TestDbPrivilegeCleanupOnDrop extends 
TestHDFSIntegrationBase {
   private void verifyTablePrivilegesDropped(Statement statement)
       throws Exception {
     List<String> roles = getRoles(statement);
-    verifyPrivilegeDropped(statement, roles, tableName1,
+    verifyIfAllPrivilegeAreDropped(statement, roles, tableName1,
         SHOW_GRANT_TABLE_POSITION);
-    verifyPrivilegeDropped(statement, roles, tableName2,
+    verifyIfAllPrivilegeAreDropped(statement, roles, tableName2,
         SHOW_GRANT_TABLE_POSITION);
-    verifyPrivilegeDropped(statement, roles, tableName3,
+    verifyIfAllPrivilegeAreDropped(statement, roles, tableName3,
         SHOW_GRANT_TABLE_POSITION);
-    verifyPrivilegeDropped(statement, roles, tableName4,
+    verifyIfAllPrivilegeAreDropped(statement, roles, tableName4,
         SHOW_GRANT_TABLE_POSITION);
 
   }
@@ -340,24 +336,9 @@ public class TestDbPrivilegeCleanupOnDrop extends 
TestHDFSIntegrationBase {
   // verify all the test privileges are dropped as we drop the objects
   private void verifyDbPrivilegesDropped(Statement statement) throws Exception 
{
     List<String> roles = getRoles(statement);
-    verifyPrivilegeDropped(statement, roles, DB2, SHOW_GRANT_DB_POSITION);
-    verifyPrivilegeDropped(statement, roles, DB1, SHOW_GRANT_DB_POSITION);
-
-  }
+    verifyIfAllPrivilegeAreDropped(statement, roles, DB2, SHOW_GRANT_DB_POSITION);
+    verifyIfAllPrivilegeAreDropped(statement, roles, DB1, SHOW_GRANT_DB_POSITION);
 
-  // verify given table/DB has no longer permissions
-  private void verifyPrivilegeDropped(Statement statement, List<String> roles,
-      String objectName, int resultPos) throws Exception {
-    for (String roleName : roles) {
-      ResultSet resultSet = statement.executeQuery("SHOW GRANT ROLE "
-          + roleName);
-      while (resultSet.next()) {
-        String returned = resultSet.getString(resultPos);
-        assertFalse("value " + objectName + " shouldn't be detected, but 
actually " + returned + " is found from resultSet",
-                objectName.equalsIgnoreCase(returned));
-          }
-      resultSet.close();
-    }
   }
 
   // verify given table is part of the role
@@ -370,13 +351,4 @@ public class TestDbPrivilegeCleanupOnDrop extends 
TestHDFSIntegrationBase {
       resultSet.close();
     }
   }
-
-  private List<String> getRoles(Statement statement) throws Exception {
-    ArrayList<String> roleList = Lists.newArrayList();
-    ResultSet resultSet = statement.executeQuery("SHOW ROLES ");
-    while (resultSet.next()) {
-      roleList.add(resultSet.getString(1));
-    }
-    return roleList;
-  }
 }

http://git-wip-us.apache.org/repos/asf/sentry/blob/b87651cf/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestHmsNotificationProcessing.java
----------------------------------------------------------------------
diff --git 
a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestHmsNotificationProcessing.java
 
b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestHmsNotificationProcessing.java
new file mode 100644
index 0000000..e730dd5
--- /dev/null
+++ 
b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestHmsNotificationProcessing.java
@@ -0,0 +1,92 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sentry.tests.e2e.dbprovider;
+
+import org.apache.hadoop.fs.permission.FsAction;
+import org.apache.sentry.tests.e2e.hdfs.TestHDFSIntegrationBase;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import java.sql.Connection;
+import java.sql.Statement;
+
+public class TestHmsNotificationProcessing extends TestHmsNotificationProcessingBase {
+
+  @BeforeClass
+  public static void setup() throws Exception {
+    hiveSyncOnDrop = true;
+    hiveSyncOnCreate = true;
+    TestHDFSIntegrationBase.setup();
+  }
+  /*
+  Tests basic sanity of HMS notification processing by verifying the following when new Hive objects are created:
+  1. Making sure that HDFS ACL rules are added for the new Hive objects.
+  2. Making sure that stale permissions are deleted for the new Hive objects that are created.
+  3. Making sure that permissions are deleted when the Hive objects are dropped.
+  */
+
+  @Test
+  public void testHmsNotificationProcessingSanity() throws Throwable {
+    dbNames = new String[]{DB1};
+    roles = new String[]{"admin_role", "read_db1", "select_tbl1"};
+    admin = "hive";
+
+    Connection connection = hiveServer2.createConnection(admin, admin);
+    Statement statement = connection.createStatement();
+    statement.execute("create role admin_role");
+    statement.execute("grant role admin_role to group hive");
+    statement.execute("grant all on server server1 to role admin_role");
+
+    // Add privileges for objects that do not exist yet
+    statement.execute("create role read_db1");
+    statement.execute("create role select_tbl1");
+    statement.execute("grant role read_db1 to group hbase");
+    statement.execute("grant role select_tbl1 to group hbase");
+    statement.execute("grant select on database " + DB1 + " to role read_db1");
+    String str = "grant select on table " + DB1 + "." + tableName1
+            + " TO ROLE select_tbl1";
+    statement.execute(str);
+
+    //Add object
+    statement.execute("CREATE DATABASE " + DB1);
+    statement.execute("use " + DB1);
+    statement.execute("create table " + DB1 + "." + tableName1
+            + " (under_col int comment 'the under column', value string)");
+
+    Thread.sleep(WAIT_FOR_NOTIFICATION_PROCESSING);
+    // Make sure that the stale privileges for those objects are removed (sync on create is enabled).
+    verifyPrivilegesDropped(statement);
+
+    //add "select" sentry permission for the object's
+    statement.execute("GRANT select ON DATABASE " + DB1 + " TO ROLE read_db1");
+    statement.execute("USE " + DB1);
+    statement.execute("GRANT SELECT ON TABLE " + tableName1
+            + " TO ROLE select_tbl1");
+
+    // Make sure that HDFS ACLs are added for the new privileges
+    verifyOnAllSubDirs("/user/hive/warehouse/db_1.db", FsAction.READ_EXECUTE, "hbase", true);
+    verifyOnAllSubDirs("/user/hive/warehouse/db_1.db/tb_1", FsAction.READ_EXECUTE, "hbase", true);
+
+    //Drop the object
+    statement.execute("DROP DATABASE " + DB1 + " CASCADE");
+
+    Thread.sleep(WAIT_FOR_NOTIFICATION_PROCESSING);
+    //Make sure that the privileges added for that object are removed.
+    verifyPrivilegesDropped(statement);
+  }
+}

http://git-wip-us.apache.org/repos/asf/sentry/blob/b87651cf/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestHmsNotificationProcessingBase.java
----------------------------------------------------------------------
diff --git 
a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestHmsNotificationProcessingBase.java
 
b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestHmsNotificationProcessingBase.java
new file mode 100644
index 0000000..ed80e88
--- /dev/null
+++ 
b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestHmsNotificationProcessingBase.java
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sentry.tests.e2e.dbprovider;
+
+import org.apache.sentry.tests.e2e.hdfs.TestHDFSIntegrationBase;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.sql.ResultSet;
+import java.sql.Statement;
+import java.util.List;
+
+import static org.junit.Assert.assertEquals;
+
+public class TestHmsNotificationProcessingBase extends TestHDFSIntegrationBase {
+  static final Logger LOGGER = LoggerFactory.getLogger(TestHmsNotificationProcessingBase.class);
+  protected final static int SHOW_GRANT_TABLE_POSITION = 2;
+  protected final static int SHOW_GRANT_DB_POSITION = 1;
+  protected static final String DB1 = "db_1",
+          DB2 = "db_2",
+          tableName1 = "tb_1";
+
+  // verify all the test privileges are dropped as we drop the objects
+  protected void verifyPrivilegesDropped(Statement statement)
+          throws Exception {
+    verifyDbPrivilegesDropped(statement);
+    verifyTablePrivilegesDropped(statement);
+  }
+
+  // verify all the test table privileges are dropped as we drop the objects
+  protected void verifyTablePrivilegesDropped(Statement statement)
+          throws Exception {
+    List<String> roles = getRoles(statement);
+    verifyIfAllPrivilegeAreDropped(statement, roles, tableName1,
+            SHOW_GRANT_TABLE_POSITION);
+  }
+
+  // verify all the test database privileges are dropped as we drop the objects
+  protected void verifyDbPrivilegesDropped(Statement statement) throws Exception {
+    List<String> roles = getRoles(statement);
+    verifyIfAllPrivilegeAreDropped(statement, roles, DB2, SHOW_GRANT_DB_POSITION);
+    verifyIfAllPrivilegeAreDropped(statement, roles, DB1, SHOW_GRANT_DB_POSITION);
+
+  }
+
+  // verify that the number of privileges held by the test roles (excluding admin_role) matches the expected count
+  protected void verifyPrivilegesCount(Statement statement, int count)
+          throws Exception {
+    int privilegeCount = 0;
+    List<String> roles = getRoles(statement);
+    for (String roleName : roles) {
+      if(roleName.compareTo("admin_role") == 0) {
+        continue;
+      }
+      ResultSet resultSet = statement.executeQuery("SHOW GRANT ROLE "
+              + roleName);
+      while (resultSet.next()) {
+        privilegeCount++;
+      }
+      resultSet.close();
+    }
+    assertEquals("Privilege count do not match", count, privilegeCount);
+  }
+}
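
The base class above collects the verification helpers the concrete tests rely on:
getRoles() lists the current roles, verifyIfAllPrivilegeAreDropped() (inherited from
TestHDFSIntegrationBase) asserts an object no longer shows up in any SHOW GRANT output,
and verifyPrivilegesCount() asserts how many grants remain on the non-admin test roles.
A rough usage sketch follows; the object name and expected count are illustrative, not
taken from this commit.

    // Inside a test method of a subclass of TestHmsNotificationProcessingBase:
    List<String> roles = getRoles(statement);
    // Assert the dropped table "tb_x" (hypothetical name) no longer appears in any grant.
    verifyIfAllPrivilegeAreDropped(statement, roles, "tb_x", SHOW_GRANT_TABLE_POSITION);
    // Assert exactly two grants remain across the non-admin test roles.
    verifyPrivilegesCount(statement, 2);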

http://git-wip-us.apache.org/repos/asf/sentry/blob/b87651cf/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestHmsNotificationProcessingWithOutHdfsSync.java
----------------------------------------------------------------------
diff --git 
a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestHmsNotificationProcessingWithOutHdfsSync.java
 
b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestHmsNotificationProcessingWithOutHdfsSync.java
new file mode 100644
index 0000000..9634ea1
--- /dev/null
+++ 
b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestHmsNotificationProcessingWithOutHdfsSync.java
@@ -0,0 +1,101 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sentry.tests.e2e.dbprovider;
+
+import org.apache.hadoop.fs.permission.FsAction;
+import org.apache.sentry.tests.e2e.hdfs.TestHDFSIntegrationBase;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import java.sql.Connection;
+import java.sql.Statement;
+
+
+public class TestHmsNotificationProcessingWithOutHdfsSync extends TestHmsNotificationProcessingBase {
+
+  @BeforeClass
+  public static void setup() throws Exception {
+    hdfsSyncEnabled = false;
+    hiveSyncOnDrop = true;
+    hiveSyncOnCreate = true;
+    TestHDFSIntegrationBase.setup();
+  }
+
+  /*
+  Tests basic sanity of HMS notification processing, with HDFS sync disabled, by verifying the following when new Hive objects are created:
+  1. Making sure that HDFS ACL rules are not added for the new Hive objects.
+  2. Making sure that stale permissions are deleted for the new Hive objects that are created.
+  3. Making sure that permissions are deleted when the Hive objects are dropped.
+  */
+
+  @Test
+  public void testHmsNotificationProcessingSanity() throws Throwable {
+    dbNames = new String[]{DB1};
+    roles = new String[]{"admin_role", "read_db1", "select_tbl1"};
+    admin = "hive";
+
+    Connection connection = hiveServer2.createConnection(admin, admin);
+    Statement statement = connection.createStatement();
+    statement.execute("create role admin_role");
+    statement.execute("grant role admin_role to group hive");
+    statement.execute("grant all on server server1 to role admin_role");
+
+    // Add privileges for objects that do not exist yet
+    statement.execute("create role read_db1");
+    statement.execute("create role select_tbl1");
+    statement.execute("grant role read_db1 to group hbase");
+    statement.execute("grant role select_tbl1 to group hbase");
+
+    //add "select" sentry permission for the object's
+    statement.execute("grant select on database " + DB1 + " to role read_db1");
+    String str = "grant select on table " + DB1 + "." + tableName1
+            + " TO ROLE select_tbl1";
+    statement.execute(str);
+
+
+    Thread.sleep(WAIT_FOR_NOTIFICATION_PROCESSING);
+    verifyPrivilegesCount(statement, 2);
+
+    //Add object
+    statement.execute("CREATE DATABASE " + DB1);
+    statement.execute("use " + DB1);
+    statement.execute("create table " + DB1 + "." + tableName1
+            + " (under_col int comment 'the under column', value string)");
+
+    Thread.sleep(WAIT_FOR_NOTIFICATION_PROCESSING);
+    // Make sure that the stale privileges for those objects are removed (sync on create is enabled).
+    verifyPrivilegesCount(statement, 0);
+
+    //add "select" sentry permission for the object's
+    statement.execute("GRANT select ON DATABASE " + DB1 + " TO ROLE read_db1");
+    statement.execute("USE " + DB1);
+    statement.execute("GRANT SELECT ON TABLE " + tableName1
+            + " TO ROLE select_tbl1");
+
+    // Make sure that no HDFS ACLs are added, since HDFS sync is disabled
+    verifyOnAllSubDirs("/user/hive/warehouse/db_1.db", FsAction.READ_EXECUTE, "hbase", false);
+    verifyOnAllSubDirs("/user/hive/warehouse/db_1.db/tb_1", FsAction.READ_EXECUTE, "hbase", false);
+
+    //Drop the object
+    statement.execute("DROP DATABASE " + DB1 + " CASCADE");
+
+    Thread.sleep(WAIT_FOR_NOTIFICATION_PROCESSING);
+    //Make sure that the privileges added for that object are removed.
+    verifyPrivilegesCount(statement, 0);
+  }
+}

http://git-wip-us.apache.org/repos/asf/sentry/blob/b87651cf/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestHmsNotificationProcessingWithOutSyncOnCreate.java
----------------------------------------------------------------------
diff --git 
a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestHmsNotificationProcessingWithOutSyncOnCreate.java
 
b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestHmsNotificationProcessingWithOutSyncOnCreate.java
new file mode 100644
index 0000000..0339c7b
--- /dev/null
+++ 
b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestHmsNotificationProcessingWithOutSyncOnCreate.java
@@ -0,0 +1,96 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sentry.tests.e2e.dbprovider;
+
+import org.apache.hadoop.fs.permission.FsAction;
+import org.apache.sentry.tests.e2e.hdfs.TestHDFSIntegrationBase;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import java.sql.Connection;
+import java.sql.Statement;
+
+public class TestHmsNotificationProcessingWithOutSyncOnCreate extends TestHmsNotificationProcessingBase {
+
+  @BeforeClass
+  public static void setup() throws Exception {
+    hiveSyncOnCreate = false;
+    hiveSyncOnDrop = true;
+    TestHDFSIntegrationBase.setup();
+  }
+
+  /*
+  Tests basic sanity of HMS notification processing, with sync on create disabled, by verifying the following when new Hive objects are created:
+  1. Making sure that HDFS ACL rules are added for the new Hive objects.
+  2. Making sure that stale permissions are not deleted for the new Hive objects that are created.
+  3. Making sure that permissions are deleted when the Hive objects are dropped.
+  */
+  @Test
+  public void testHmsNotificationProcessingSanity() throws Throwable {
+    dbNames = new String[]{DB1};
+    roles = new String[]{"admin_role", "read_db1", "select_tbl1"};
+    admin = "hive";
+
+    Connection connection = hiveServer2.createConnection(admin, admin);
+    Statement statement = connection.createStatement();
+    statement.execute("create role admin_role");
+    statement.execute("grant role admin_role to group hive");
+    statement.execute("grant all on server server1 to role admin_role");
+
+    // Add privileges for objects that do not exist yet
+    statement.execute("create role read_db1");
+    statement.execute("create role select_tbl1");
+    statement.execute("grant role read_db1 to group hbase");
+    statement.execute("grant role select_tbl1 to group hbase");
+
+    //add "select" sentry permission for the object's
+    statement.execute("grant select on database " + DB1 + " to role read_db1");
+    String str = "grant select on table " + DB1 + "." + tableName1
+            + " TO ROLE select_tbl1";
+    statement.execute(str);
+
+    //Add object
+    statement.execute("CREATE DATABASE " + DB1);
+    statement.execute("use " + DB1);
+    statement.execute("create table " + DB1 + "." + tableName1
+            + " (under_col int comment 'the under column', value string)");
+
+    Thread.sleep(WAIT_FOR_NOTIFICATION_PROCESSING);
+    // Make sure that the stale privileges for those objects are not removed, since sync on create is disabled.
+    verifyPrivilegesCount(statement, 2);
+
+    //add "select" sentry permission for the object's
+    statement.execute("GRANT select ON DATABASE " + DB1 + " TO ROLE read_db1");
+    statement.execute("USE " + DB1);
+    statement.execute("GRANT SELECT ON TABLE " + tableName1
+            + " TO ROLE select_tbl1");
+
+
+    // Make sure that HDFS ACLs are added for the new privileges
+    verifyOnAllSubDirs("/user/hive/warehouse/db_1.db", FsAction.READ_EXECUTE, "hbase", true);
+    verifyOnAllSubDirs("/user/hive/warehouse/db_1.db/tb_1", FsAction.READ_EXECUTE, "hbase", true);
+
+    //Drop the object
+    statement.execute("DROP DATABASE " + DB1 + " CASCADE");
+
+    Thread.sleep(WAIT_FOR_NOTIFICATION_PROCESSING);
+    //Make sure that the privileges added for that object are removed.
+    verifyPrivilegesCount(statement, 0);
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/sentry/blob/b87651cf/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestHmsNotificationProcessingWithOutSyncOnDrop.java
----------------------------------------------------------------------
diff --git 
a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestHmsNotificationProcessingWithOutSyncOnDrop.java
 
b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestHmsNotificationProcessingWithOutSyncOnDrop.java
new file mode 100644
index 0000000..f70b6ab
--- /dev/null
+++ 
b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestHmsNotificationProcessingWithOutSyncOnDrop.java
@@ -0,0 +1,92 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.sentry.tests.e2e.dbprovider;
+
+import org.apache.hadoop.fs.permission.FsAction;
+import org.apache.sentry.tests.e2e.hdfs.TestHDFSIntegrationBase;
+import org.junit.BeforeClass;
+import org.junit.Test;
+
+import java.sql.Connection;
+import java.sql.Statement;
+
+public class TestHmsNotificationProcessingWithOutSyncOnDrop extends TestHmsNotificationProcessingBase {
+
+  @BeforeClass
+  public static void setup() throws Exception {
+    hiveSyncOnDrop = false;
+    hiveSyncOnCreate = true;
+    hdfsSyncEnabled = true;
+    TestHDFSIntegrationBase.setup();
+  }
+
+  /*
+  Tests basic sanity of HMS notification processing, with sync on drop disabled, by verifying the following when new Hive objects are created:
+  1. Making sure that HDFS ACL rules are added for the new Hive objects.
+  2. Making sure that stale permissions are deleted for the new Hive objects that are created.
+  3. Making sure that permissions are not deleted when the Hive objects are dropped.
+  */
+
+  @Test
+  public void testHmsNotificationProcessingSanity() throws Throwable {
+    dbNames = new String[]{DB1};
+    roles = new String[]{"admin_role", "read_db1", "select_tbl1"};
+    admin = "hive";
+
+    Connection connection = hiveServer2.createConnection(admin, admin);
+    Statement statement = connection.createStatement();
+    statement.execute("create role admin_role");
+    statement.execute("grant role admin_role to group hive");
+    statement.execute("grant all on server server1 to role admin_role");
+
+    // Add privileges for objects that do not exist yet
+    statement.execute("create role read_db1");
+    statement.execute("create role select_tbl1");
+    statement.execute("grant role read_db1 to group hbase");
+    statement.execute("grant role select_tbl1 to group hbase");
+
+    //Add object
+    statement.execute("CREATE DATABASE " + DB1);
+    statement.execute("use " + DB1);
+    statement.execute("create table " + DB1 + "." + tableName1
+            + " (under_col int comment 'the under column', value string)");
+
+    Thread.sleep(WAIT_FOR_NOTIFICATION_PROCESSING);
+    // No privileges have been granted on these objects yet, so the count should be zero.
+    verifyPrivilegesCount(statement, 0);
+
+    //add "select" sentry permission for the object's
+    statement.execute("GRANT select ON DATABASE " + DB1 + " TO ROLE read_db1");
+    statement.execute("USE " + DB1);
+    statement.execute("GRANT SELECT ON TABLE " + tableName1
+            + " TO ROLE select_tbl1");
+
+    verifyPrivilegesCount(statement, 2);
+
+    // Make sure that HDFS ACLs are added for the new privileges
+    verifyOnAllSubDirs("/user/hive/warehouse/db_1.db", FsAction.READ_EXECUTE, "hbase", true);
+    verifyOnAllSubDirs("/user/hive/warehouse/db_1.db/tb_1", FsAction.READ_EXECUTE, "hbase", true);
+
+    //Drop the object
+    statement.execute("DROP DATABASE " + DB1 + " CASCADE");
+
+    Thread.sleep(WAIT_FOR_NOTIFICATION_PROCESSING);
+    // Make sure that the privileges added for those objects are not removed, since sync on drop is disabled.
+    verifyPrivilegesCount(statement, 2);
+  }
+}
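
Taken together, the four new test classes exercise these flag combinations (all
expectations summarized from the test bodies above):

    TestHmsNotificationProcessing                     hdfsSync=true,  syncOnCreate=true,  syncOnDrop=true:  ACLs added; stale grants dropped on create; grants dropped on drop
    TestHmsNotificationProcessingWithOutHdfsSync      hdfsSync=false, syncOnCreate=true,  syncOnDrop=true:  no HDFS ACLs; stale grants dropped on create; grants dropped on drop
    TestHmsNotificationProcessingWithOutSyncOnCreate  hdfsSync=true,  syncOnCreate=false, syncOnDrop=true:  ACLs added; stale grants kept on create; grants dropped on drop
    TestHmsNotificationProcessingWithOutSyncOnDrop    hdfsSync=true,  syncOnCreate=true,  syncOnDrop=false: ACLs added; grants kept after drop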

http://git-wip-us.apache.org/repos/asf/sentry/blob/b87651cf/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationAdvanced.java
----------------------------------------------------------------------
diff --git 
a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationAdvanced.java
 
b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationAdvanced.java
index 33ace57..c8fc019 100644
--- 
a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationAdvanced.java
+++ 
b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationAdvanced.java
@@ -31,6 +31,7 @@ import org.apache.sentry.hdfs.PathsUpdate;
 import org.apache.sentry.tests.e2e.hive.StaticUserGroup;
 
 import org.junit.Assert;
+import org.junit.BeforeClass;
 import org.junit.Test;
 
 import org.slf4j.Logger;
@@ -46,6 +47,11 @@ public class TestHDFSIntegrationAdvanced extends 
TestHDFSIntegrationBase {
   private static final Logger LOGGER = LoggerFactory
       .getLogger(TestHDFSIntegrationAdvanced.class);
 
+  @BeforeClass
+  public static void setup() throws Exception {
+    hdfsSyncEnabled = true;
+    TestHDFSIntegrationBase.setup();
+  }
   @Test
   public void testNoPartitionInsert() throws Throwable {
     dbNames = new String[]{"db1"};

http://git-wip-us.apache.org/repos/asf/sentry/blob/b87651cf/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationBase.java
----------------------------------------------------------------------
diff --git 
a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationBase.java
 
b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationBase.java
index c8eef09..4cd00e6 100644
--- 
a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationBase.java
+++ 
b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationBase.java
@@ -41,6 +41,7 @@ import java.util.concurrent.atomic.AtomicBoolean;
 
 import com.google.common.base.Preconditions;
 
+import com.google.common.collect.Lists;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileStatus;
@@ -96,6 +97,7 @@ import com.google.common.io.Files;
 import com.google.common.io.Resources;
 
 import static 
org.apache.sentry.hdfs.ServiceConstants.ServerConfig.SENTRY_HDFS_INTEGRATION_PATH_PREFIXES;
+import static org.junit.Assert.assertFalse;
 
 /**
  * Base abstract class for HDFS Sync integration
@@ -170,6 +172,9 @@ public abstract class TestHDFSIntegrationBase {
           ServerConfig.SENTRY_HMSFOLLOWER_INIT_DELAY_MILLS_DEFAULT +
               ServerConfig.SENTRY_HMSFOLLOWER_INTERVAL_MILLS_DEFAULT * 2 + 
CACHE_REFRESH * 2;
 
+  protected static final long HMSFOLLOWER_INTERVAL_MILLS = 50;
+  protected static final long WAIT_FOR_NOTIFICATION_PROCESSING = HMSFOLLOWER_INTERVAL_MILLS * 3;
+
   // Time to wait before running next tests. The unit is milliseconds.
   // Deleting HDFS may finish, but HDFS may not be ready for creating the same 
file again.
   // We need to to make sure that creating the same file in the next test will 
succeed
@@ -194,7 +199,11 @@ public abstract class TestHDFSIntegrationBase {
   protected String[] dbNames;
   protected String[] roles;
   protected String admin;
+  protected static Boolean hdfsSyncEnabled = true;
+  protected static Boolean hiveSyncOnCreate = false;
+  protected static Boolean hiveSyncOnDrop = true;
   protected static Configuration hadoopConf;
+  protected static final Map<String, String> sentryProperties = Maps.newHashMap();
 
   protected static File assertCreateDir(File dir) {
     if(!dir.isDirectory()) {
@@ -251,7 +260,7 @@ public abstract class TestHDFSIntegrationBase {
       if (groupShouldExist) {
         Assert.assertEquals("Error at verifying Path action : " + p + " ;", 
fsAction, getAcls(p).get(group));
       } else {
-        Assert.assertFalse("Error at verifying Path : " + p + " ," +
+        assertFalse("Error at verifying Path : " + p + " ," +
             " group : " + group + " ;", getAcls(p).containsKey(group));
       }
       LOGGER.info("Successfully found acls for path = " + p.getName());
@@ -347,7 +356,7 @@ public abstract class TestHDFSIntegrationBase {
     try {
       fs.listFiles(p, true);
       if(!hasPermission) {
-        Assert.assertFalse("Expected listing files to fail", false);
+        assertFalse("Expected listing files to fail", false);
       }
     } catch (Exception e) {
       if(hasPermission) {
@@ -415,6 +424,30 @@ public abstract class TestHDFSIntegrationBase {
 
   }
 
+  // verify the given table/DB no longer has any privileges
+  protected void verifyIfAllPrivilegeAreDropped(Statement statement, List<String> roles,
+                                                String objectName, int resultPos) throws Exception {
+    for (String roleName : roles) {
+      ResultSet resultSet = statement.executeQuery("SHOW GRANT ROLE "
+              + roleName);
+      while (resultSet.next()) {
+        String returned = resultSet.getString(resultPos);
+        assertFalse("value " + objectName + " shouldn't be detected, but actually " + returned + " is found from resultSet",
+                objectName.equalsIgnoreCase(returned));
+      }
+      resultSet.close();
+    }
+  }
+
+  protected List<String> getRoles(Statement statement) throws Exception {
+    ArrayList<String> roleList = Lists.newArrayList();
+    ResultSet resultSet = statement.executeQuery("SHOW ROLES ");
+    while (resultSet.next()) {
+      roleList.add(resultSet.getString(1));
+    }
+    return roleList;
+  }
+
   protected void loadDataTwoCols(Statement stmt) throws IOException, 
SQLException {
     FSDataOutputStream f1 = miniDFS.getFileSystem().create(new 
Path("/tmp/f2.txt"));
     f1.writeChars("m1d1_t1, m1d1_t2\n");
@@ -800,37 +833,48 @@ public abstract class TestHDFSIntegrationBase {
         public Void run() throws Exception {
           Configuration sentryConf = new Configuration(false);
           sentryConf.set(SENTRY_HDFS_INTEGRATION_PATH_PREFIXES, 
MANAGED_PREFIXES);
-          Map<String, String> properties = Maps.newHashMap();
-          properties.put(HiveServerFactory.AUTHZ_PROVIDER_BACKEND,
+          sentryProperties.put(HiveServerFactory.AUTHZ_PROVIDER_BACKEND,
               SimpleDBProviderBackend.class.getName());
-          properties.put(ConfVars.HIVE_AUTHORIZATION_TASK_FACTORY.varname,
+          
sentryProperties.put(ConfVars.HIVE_AUTHORIZATION_TASK_FACTORY.varname,
               SentryHiveAuthorizationTaskFactoryImpl.class.getName());
-          properties
+          sentryProperties
               .put(ConfVars.HIVE_SERVER2_THRIFT_MIN_WORKER_THREADS.varname, 
"2");
-          properties.put("hive.exec.local.scratchdir", 
Files.createTempDir().getAbsolutePath());
-          properties.put(ServerConfig.SECURITY_MODE, 
ServerConfig.SECURITY_MODE_NONE);
-//        properties.put("sentry.service.server.compact.transport", "true");
-          properties.put("sentry.hive.testing.mode", "true");
-          properties.put("sentry.service.reporting", "JMX");
-          properties.put(ServerConfig.ADMIN_GROUPS, "hive,admin");
-          properties.put(ServerConfig.RPC_ADDRESS, "localhost");
-          properties.put(ServerConfig.RPC_PORT, String.valueOf(sentryPort > 0 
? sentryPort : 0));
-          properties.put(ServerConfig.SENTRY_VERIFY_SCHEM_VERSION, "false");
-          properties.put("sentry.hive.server", "server1");
-
-          properties.put(ServerConfig.SENTRY_STORE_GROUP_MAPPING, 
ServerConfig.SENTRY_STORE_LOCAL_GROUP_MAPPING);
-          properties.put(ServerConfig.SENTRY_STORE_GROUP_MAPPING_RESOURCE, 
policyFileLocation.getPath());
-          properties.put(ServerConfig.SENTRY_STORE_JDBC_URL,
+          sentryProperties.put("hive.exec.local.scratchdir", 
Files.createTempDir().getAbsolutePath());
+          sentryProperties.put(ServerConfig.SECURITY_MODE, 
ServerConfig.SECURITY_MODE_NONE);
+//        sentryProperties.put("sentry.service.server.compact.transport", 
"true");
+          sentryProperties.put("sentry.hive.testing.mode", "true");
+          sentryProperties.put("sentry.service.reporting", "JMX");
+          sentryProperties.put(ServerConfig.ADMIN_GROUPS, "hive,admin");
+          sentryProperties.put(ServerConfig.RPC_ADDRESS, "localhost");
+          sentryProperties.put(ServerConfig.RPC_PORT, 
String.valueOf(sentryPort > 0 ? sentryPort : 0));
+          sentryProperties.put(ServerConfig.SENTRY_VERIFY_SCHEM_VERSION, 
"false");
+          sentryProperties.put("sentry.hive.server", "server1");
+
+          sentryProperties.put(ServerConfig.SENTRY_STORE_GROUP_MAPPING, 
ServerConfig.SENTRY_STORE_LOCAL_GROUP_MAPPING);
+          
sentryProperties.put(ServerConfig.SENTRY_STORE_GROUP_MAPPING_RESOURCE, 
policyFileLocation.getPath());
+          sentryProperties.put(ServerConfig.SENTRY_STORE_JDBC_URL,
               "jdbc:derby:;databaseName=" + baseDir.getPath()
                   + "/sentrystore_db;create=true");
-          properties.put(ServerConfig.SENTRY_STORE_JDBC_PASS, "dummy");
-          properties.put("sentry.service.processor.factories",
-              
"org.apache.sentry.provider.db.service.thrift.SentryPolicyStoreProcessorFactory,org.apache.sentry.hdfs.SentryHDFSServiceProcessorFactory");
-          properties.put("sentry.policy.store.plugins", 
"org.apache.sentry.hdfs.SentryPlugin");
-          properties.put(ServerConfig.SENTRY_HMSFOLLOWER_INIT_DELAY_MILLS, 
"10000");
-          properties.put(ServerConfig.SENTRY_HMSFOLLOWER_INTERVAL_MILLS, "50");
-          properties.put(ServerConfig.RPC_MIN_THREADS, "3");
-          for (Map.Entry<String, String> entry : properties.entrySet()) {
+          sentryProperties.put(ServerConfig.SENTRY_STORE_JDBC_PASS, "dummy");
+          sentryProperties.put(ServerConfig.SENTRY_HMSFOLLOWER_INIT_DELAY_MILLS, "10000");
+          sentryProperties.put(ServerConfig.SENTRY_HMSFOLLOWER_INTERVAL_MILLS, String.valueOf(HMSFOLLOWER_INTERVAL_MILLS));
+          sentryProperties.put(ServerConfig.RPC_MIN_THREADS, "3");
+          if(hiveSyncOnCreate) {
+            sentryProperties.put("sentry.hive.sync.create", "true");
+          } else {
+            sentryProperties.put("sentry.hive.sync.create", "false");
+          }
+          if(hiveSyncOnDrop) {
+            sentryProperties.put("sentry.hive.sync.drop", "true");
+          } else {
+            sentryProperties.put("sentry.hive.sync.drop", "false");
+          }
+          if(hdfsSyncEnabled) {
+            sentryProperties.put("sentry.service.processor.factories",
+                    "org.apache.sentry.provider.db.service.thrift.SentryPolicyStoreProcessorFactory,org.apache.sentry.hdfs.SentryHDFSServiceProcessorFactory");
+            sentryProperties.put("sentry.policy.store.plugins", "org.apache.sentry.hdfs.SentryPlugin");
+          }
+          for (Map.Entry<String, String> entry : sentryProperties.entrySet()) {
             sentryConf.set(entry.getKey(), entry.getValue());
           }
           sentryServer = 
SentrySrvFactory.create(SentrySrvFactory.SentrySrvType.INTERNAL_SERVER,

http://git-wip-us.apache.org/repos/asf/sentry/blob/b87651cf/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationEnd2End.java
----------------------------------------------------------------------
diff --git 
a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationEnd2End.java
 
b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationEnd2End.java
index a909b91..061900a 100644
--- 
a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationEnd2End.java
+++ 
b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationEnd2End.java
@@ -26,6 +26,7 @@ import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.sentry.tests.e2e.hive.StaticUserGroup;
 
 import org.junit.Assert;
+import org.junit.BeforeClass;
 import org.junit.Ignore;
 import org.junit.Test;
 
@@ -46,7 +47,11 @@ public class TestHDFSIntegrationEnd2End extends 
TestHDFSIntegrationBase {
 
   private static String adminRole = "admin_role";
 
-
+  @BeforeClass
+  public static void setup() throws Exception {
+    hdfsSyncEnabled = true;
+    TestHDFSIntegrationBase.setup();
+  }
   @Test
   public void testEnd2EndManagedPaths() throws Throwable {
     tmpHDFSDir = new Path("/tmp/external");

http://git-wip-us.apache.org/repos/asf/sentry/blob/b87651cf/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationWithHA.java
----------------------------------------------------------------------
diff --git 
a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationWithHA.java
 
b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationWithHA.java
index cc0449b..96a2f90 100644
--- 
a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationWithHA.java
+++ 
b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationWithHA.java
@@ -22,6 +22,7 @@ import org.junit.BeforeClass;
 public class TestHDFSIntegrationWithHA extends TestHDFSIntegrationEnd2End {
   @BeforeClass
   public static void setup() throws Exception {
+    hdfsSyncEnabled = true;
     TestHDFSIntegrationBase.testSentryHA = true;
     TestHDFSIntegrationBase.setup();
   }
