Revert "CDH-61466: Failed to upgrade CDH5.13.0 to CDH6 with message "The Sentry 
store schema version 1.5.0-cdh5 is different from distribution version 2.0.0"( 
Kalyan Kumar Kalvagadda, reviewed-by Sergio Pena)"

This reverts commit 0c4f5e78fd59ea8f756535352b71f2f9eb76dacc.


Project: http://git-wip-us.apache.org/repos/asf/sentry/repo
Commit: http://git-wip-us.apache.org/repos/asf/sentry/commit/36128ec2
Tree: http://git-wip-us.apache.org/repos/asf/sentry/tree/36128ec2
Diff: http://git-wip-us.apache.org/repos/asf/sentry/diff/36128ec2

Branch: refs/heads/master
Commit: 36128ec2f64fc31dc215808fdb00b4516d0aa1ff
Parents: 913be9a
Author: Kalyan Kumar Kalvagadda <kkal...@cloudera.com>
Authored: Tue Nov 7 19:18:14 2017 -0600
Committer: Kalyan Kumar Kalvagadda <kkal...@cloudera.com>
Committed: Tue Nov 7 19:23:05 2017 -0600

----------------------------------------------------------------------
 .../TestDbPrivilegeCleanupOnDrop.java           | 142 +++++++++----------
 .../tests/e2e/hdfs/TestHDFSIntegrationBase.java |   2 -
 2 files changed, 70 insertions(+), 74 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/sentry/blob/36128ec2/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbPrivilegeCleanupOnDrop.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbPrivilegeCleanupOnDrop.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbPrivilegeCleanupOnDrop.java
index 2138304..33c9124 100644
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbPrivilegeCleanupOnDrop.java
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestDbPrivilegeCleanupOnDrop.java
@@ -22,6 +22,8 @@ import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertThat;
 import static org.junit.Assert.assertTrue;
 
+import java.io.File;
+import java.io.FileOutputStream;
 import java.sql.Connection;
 import java.sql.ResultSet;
 import java.sql.SQLException;
@@ -29,78 +31,67 @@ import java.sql.Statement;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.sentry.tests.e2e.hdfs.TestHDFSIntegrationBase;
-import org.apache.sentry.tests.e2e.hive.StaticUserGroup;
+import org.apache.sentry.service.thrift.HMSFollower;
+import org.apache.sentry.tests.e2e.hive.AbstractTestWithStaticConfiguration;
+import org.junit.BeforeClass;
 import org.junit.Before;
+import org.junit.After;
 import org.junit.Test;
 
 import com.google.common.collect.Lists;
+import com.google.common.io.Resources;
 
-public class TestDbPrivilegeCleanupOnDrop extends TestHDFSIntegrationBase {
+public class TestDbPrivilegeCleanupOnDrop extends
+    AbstractTestWithStaticConfiguration {
 
   private final static int SHOW_GRANT_TABLE_POSITION = 2;
   private final static int SHOW_GRANT_DB_POSITION = 1;
 
+  private final String SINGLE_TYPE_DATA_FILE_NAME = "kv1.dat";
+
   private final static String tableName1 = "tb_1";
   private final static String tableName2 = "tb_2";
   private final static String tableName3 = "tb_3";
   private final static String tableName4 = "tb_4";
   private final static String renameTag = "_new";
 
-  protected static final String ALL_DB1 = "server=server1->db=db_1",
-          ADMIN1 = StaticUserGroup.ADMIN1,
-          ADMINGROUP = StaticUserGroup.ADMINGROUP,
-          USER1_1 = StaticUserGroup.USER1_1,
-          USER2_1 = StaticUserGroup.USER2_1,
-          USER3_1 = StaticUserGroup.USER3_1,
-          USERGROUP1 = StaticUserGroup.USERGROUP1,
-          USERGROUP2 = StaticUserGroup.USERGROUP2,
-          USERGROUP3 = StaticUserGroup.USERGROUP3,
-          DB1 = "db_1",
-          DB2 = "db_2";
-
-  static final long WAIT_FOR_NOTIFICATION_PROCESSING = 5000;
+  static final long WAIT_FOR_NOTIFICATION_PROCESSING = 10000;
 
-  private Connection connection;
-  private Statement statement;
+  @BeforeClass
+  public static void setupTestStaticConfiguration() throws Exception {
+    useSentryService = true;
+    if (!setMetastoreListener) {
+      setMetastoreListener = true;
+    }
+    AbstractTestWithStaticConfiguration.setupTestStaticConfiguration();
+  }
 
+  @Override
   @Before
-  public void initialize() throws Exception{
-    super.setUpTempDir();
-    admin = "hive";
-    connection = hiveServer2.createConnection(admin, admin);
-    statement = connection.createStatement();
-    statement.execute("create role admin_role");
-    statement.execute("grant role admin_role to group hive");
-    statement.execute("grant all on server server1 to role admin_role");
+  public void setup() throws Exception {
+    super.setupAdmin();
+    super.setup();
+    // context = createContext();
+    File dataFile = new File(dataDir, SINGLE_TYPE_DATA_FILE_NAME);
+    FileOutputStream to = new FileOutputStream(dataFile);
+    Resources.copy(Resources.getResource(SINGLE_TYPE_DATA_FILE_NAME), to);
+    to.close();
+    // Check the HMS connection only when notification log is enabled.
+    if (enableNotificationLog) {
+      while (!HMSFollower.isConnectedToHms()) {
+        Thread.sleep(1000);
+      }
+    }
   }
 
-  @Test
-  public void BasicSanity() throws Exception {
-    dbNames = new String[]{DB1};
-    roles = new String[]{"admin_role", "all_db1", "all_tbl1", "all_tbl2"};
-
-    statement.execute("CREATE ROLE all_db1");
-    statement.execute("CREATE ROLE all_tbl1");
-    statement.execute("CREATE ROLE all_tbl2");
-    statement.execute("CREATE DATABASE " + DB1);
-    statement.execute("create table " + DB1 + "." + tableName3
-            + " (under_col int comment 'the under column', value string)");
-    statement.execute("create table " + DB1 + "." + tableName4
-            + " (under_col int comment 'the under column', value string)");
-
-
-    statement.execute("GRANT all ON DATABASE " + DB1 + " TO ROLE all_db1");
-    statement.execute("USE " + DB1);
-    statement.execute("GRANT all ON TABLE " + tableName3 + " TO ROLE all_tbl1");
-    statement.execute("GRANT all ON TABLE " + tableName4 + " TO ROLE all_tbl2");
-
-    statement.execute("DROP DATABASE " + DB1 + " CASCADE");
-
-    verifyDbPrivilegesDropped(statement);
+  @After
+  public void tearDown() throws Exception {
+    if (context != null) {
+      context.close();
+    }
   }
+
   /**
-   *
    * drop table and verify that the no privileges are referring to it drop db
    * and verify that the no privileges are referring to it drop db cascade
    * verify that the no privileges are referring to db and tables under it
@@ -109,15 +100,16 @@ public class TestDbPrivilegeCleanupOnDrop extends TestHDFSIntegrationBase {
    */
   @Test
   public void testDropObjects() throws Exception {
-    dbNames = new String[]{DB1, DB2};
-    roles = new String[]{"admin_role", "read_db1", "all_db1", "select_tbl1",
-            "insert_tbl1", "all_tbl1", "all_tbl2", "all_prod"};
+    Connection connection = context.createConnection(ADMIN1);
+    Statement statement = context.createStatement(connection);
 
     setupRoles(statement); // create required roles
     setupDbObjects(statement); // create test DBs and Tables
     setupPrivileges(statement); // setup privileges for USER1
     dropDbObjects(statement); // drop objects
-    Thread.sleep(WAIT_FOR_NOTIFICATION_PROCESSING);
+    if (enableNotificationLog) {
+      Thread.sleep(WAIT_FOR_NOTIFICATION_PROCESSING);
+    }
     verifyPrivilegesDropped(statement); // verify privileges are removed
 
     statement.close();
@@ -146,17 +138,17 @@ public class TestDbPrivilegeCleanupOnDrop extends TestHDFSIntegrationBase {
    */
   @Test
   public void testReCreateObjects() throws Exception {
-    dbNames = new String[]{DB1, DB2};
-    roles = new String[]{"admin_role", "read_db1", "all_db1", "select_tbl1",
-            "insert_tbl1", "all_tbl1", "all_tbl2", "all_prod"};
-
+    Connection connection = context.createConnection(ADMIN1);
+    Statement statement = context.createStatement(connection);
     setupRoles(statement); // create required roles
     setupDbObjects(statement); // create test DBs and Tables
     setupPrivileges(statement); // setup privileges for USER1
     dropDbObjects(statement); // drop DB and tables
 
     setupDbObjects(statement); // recreate same DBs and tables
-    Thread.sleep(WAIT_FOR_NOTIFICATION_PROCESSING);
+    if (enableNotificationLog) {
+      Thread.sleep(WAIT_FOR_NOTIFICATION_PROCESSING);
+    }
     verifyPrivilegesDropped(statement); // verify the stale privileges removed
   }
 
@@ -168,14 +160,15 @@ public class TestDbPrivilegeCleanupOnDrop extends TestHDFSIntegrationBase {
    */
   @Test
   public void testRenameTables() throws Exception {
-    dbNames = new String[]{DB1, DB2};
-    roles = new String[]{"admin_role", "read_db1", "all_db1", "select_tbl1",
-            "insert_tbl1", "all_tbl1", "all_tbl2", "all_prod"};
+    Connection connection = context.createConnection(ADMIN1);
+    Statement statement = context.createStatement(connection);
 
     setupRoles(statement); // create required roles
     setupDbObjects(statement); // create test DBs and Tables
     setupPrivileges(statement); // setup privileges for USER1
-    Thread.sleep(WAIT_FOR_NOTIFICATION_PROCESSING);
+    if (enableNotificationLog) {
+      Thread.sleep(WAIT_FOR_NOTIFICATION_PROCESSING);
+    }
 
     // verify privileges on the created tables
     statement.execute("USE " + DB2);
@@ -187,7 +180,9 @@ public class TestDbPrivilegeCleanupOnDrop extends TestHDFSIntegrationBase {
 
     renameTables(statement); // alter tables to rename
     // verify privileges removed for old tables
-    Thread.sleep(WAIT_FOR_NOTIFICATION_PROCESSING);
+    if (enableNotificationLog) {
+      Thread.sleep(WAIT_FOR_NOTIFICATION_PROCESSING);
+    }
     verifyTablePrivilegesDropped(statement);
 
     // verify privileges created for new tables
@@ -210,9 +205,8 @@ public class TestDbPrivilegeCleanupOnDrop extends TestHDFSIntegrationBase {
    */
   @Test
   public void testDropAndRenameWithMultiAction() throws Exception {
-    dbNames = new String[]{DB1, DB2};
-    roles = new String[]{"admin_role", "user_role"};
-
+    Connection connection = context.createConnection(ADMIN1);
+    Statement statement = context.createStatement(connection);
     statement.execute("CREATE ROLE user_role");
     statement.execute("GRANT ROLE user_role TO GROUP " + USERGROUP1);
 
@@ -230,15 +224,19 @@ public class TestDbPrivilegeCleanupOnDrop extends TestHDFSIntegrationBase {
     statement.execute("GRANT CREATE ON DATABASE " + DB1 + " TO ROLE user_role");
 
     // After rename table t1 to t2
-    connection = hiveServer2.createConnection(USER1_1, USER1_1);
-    statement = connection.createStatement();
+    connection = context.createConnection(USER1_1);
+    statement = context.createStatement(connection);
     statement.execute("USE " + DB1);
     statement.execute("ALTER TABLE t1 RENAME TO t2");
-    Thread.sleep(WAIT_FOR_NOTIFICATION_PROCESSING);
+    if (enableNotificationLog) {
+      Thread.sleep(WAIT_FOR_NOTIFICATION_PROCESSING);
+    }
 
     // After rename table t1 to t2, user_role should have permission to drop t2
     statement.execute("drop table t2");
-    Thread.sleep(WAIT_FOR_NOTIFICATION_PROCESSING);
+    if (enableNotificationLog) {
+      Thread.sleep(WAIT_FOR_NOTIFICATION_PROCESSING);
+    }
     ResultSet resultSet = statement.executeQuery("SHOW GRANT ROLE user_role");
     // user_role will revoke all privilege from table t2, only remain DROP/CREATE on db_1
     assertRemainingRows(resultSet, 2);
@@ -355,7 +353,7 @@ public class TestDbPrivilegeCleanupOnDrop extends TestHDFSIntegrationBase {
         String returned = resultSet.getString(resultPos);
         assertFalse("value " + objectName + " shouldn't be detected, but 
actually " + returned + " is found from resultSet",
                 objectName.equalsIgnoreCase(returned));
-          }
+      }
       resultSet.close();
     }
   }

http://git-wip-us.apache.org/repos/asf/sentry/blob/36128ec2/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationBase.java
----------------------------------------------------------------------
diff --git a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationBase.java b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationBase.java
index 3c8b70e..3c96d55 100644
--- a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationBase.java
+++ b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegrationBase.java
@@ -807,7 +807,6 @@ public abstract class TestHDFSIntegrationBase {
           properties.put(ServerConfig.RPC_ADDRESS, "localhost");
          properties.put(ServerConfig.RPC_PORT, String.valueOf(sentryPort > 0 ? sentryPort : 0));
           properties.put(ServerConfig.SENTRY_VERIFY_SCHEM_VERSION, "false");
-          properties.put("sentry.hive.server", "server1");
 
          properties.put(ServerConfig.SENTRY_STORE_GROUP_MAPPING, ServerConfig.SENTRY_STORE_LOCAL_GROUP_MAPPING);
          properties.put(ServerConfig.SENTRY_STORE_GROUP_MAPPING_RESOURCE, policyFileLocation.getPath());
@@ -856,7 +855,6 @@ public abstract class TestHDFSIntegrationBase {
     //Clean up roles
     conn = hiveServer2.createConnection("hive", "hive");
     stmt = conn.createStatement();
-    LOGGER.info("About to clear all roles");
     for( String role:roles) {
       stmt.execute("drop role " + role);
     }

Reply via email to