http://git-wip-us.apache.org/repos/asf/sentry/blob/bfb354f2/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/TestUriPermissions.java ---------------------------------------------------------------------- diff --git a/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/TestUriPermissions.java b/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/TestUriPermissions.java new file mode 100644 index 0000000..a1b89ae --- /dev/null +++ b/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/TestUriPermissions.java @@ -0,0 +1,262 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.sentry.tests.e2e.hive; + +import java.io.File; +import java.io.FileOutputStream; +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.Statement; + +import com.google.common.io.Resources; +import org.junit.Assert; + +import org.apache.sentry.provider.file.PolicyFile; +import org.apache.sentry.tests.e2e.hive.hiveserver.HiveServerFactory; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +public class TestUriPermissions extends AbstractTestWithStaticConfiguration { + private PolicyFile policyFile; + private File dataFile; + private String loadData; + + @BeforeClass + public static void setupTestStaticConfiguration () throws Exception { + AbstractTestWithStaticConfiguration.setupTestStaticConfiguration(); + } + + @Before + public void setup() throws Exception { + policyFile = super.setupPolicy(); + super.setup(); + } + + // test load data into table + @Test + public void testLoadPrivileges() throws Exception { + dataFile = new File(dataDir, SINGLE_TYPE_DATA_FILE_NAME); + FileOutputStream to = new FileOutputStream(dataFile); + Resources.copy(Resources.getResource(SINGLE_TYPE_DATA_FILE_NAME), to); + to.close(); + loadData = "server=server1->uri=file://" + dataFile.getPath(); + + String tabName = "tab1"; + Connection userConn = null; + Statement userStmt = null; + + // create dbs + Connection adminCon = context.createConnection(ADMIN1); + Statement adminStmt = context.createStatement(adminCon); + adminStmt.execute("use default"); + adminStmt.execute("CREATE DATABASE " + DB1); + adminStmt.execute("use " + DB1); + adminStmt.execute("CREATE TABLE " + tabName + "(id int)"); + context.close(); + + policyFile + .addRolesToGroup(USERGROUP1, "db1_read", "db1_write", "data_read") + .addRolesToGroup(USERGROUP2, "db1_write") + .addPermissionsToRole("db1_write", "server=server1->db=" + DB1 + "->table=" + tabName + "->action=INSERT") + .addPermissionsToRole("db1_read", "server=server1->db=" + DB1 + "->table=" + tabName + "->action=SELECT") + .addPermissionsToRole("data_read", loadData); + 
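+    // USERGROUP1 holds SELECT and INSERT on the table plus the URI privilege on the data file;
+    // USERGROUP2 holds INSERT only, so its LOAD below should fail the URI authorization check.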
writePolicyFile(policyFile); + + // positive test, user1 has access to file being loaded + userConn = context.createConnection(USER1_1); + userStmt = context.createStatement(userConn); + userStmt.execute("use " + DB1); + userStmt.execute("load data local inpath 'file://" + dataFile.getPath() + + "' into table " + tabName); + userStmt.execute("select * from " + tabName + " limit 1"); + ResultSet res = userStmt.getResultSet(); + Assert.assertTrue("Table should have data after load", res.next()); + res.close(); + context.close(); + + // Negative test, user2 doesn't have access to the file being loaded + userConn = context.createConnection(USER2_1); + userStmt = context.createStatement(userConn); + userStmt.execute("use " + DB1); + context.assertAuthzException(userStmt, "load data local inpath 'file://" + dataFile.getPath() + + "' into table " + tabName); + userStmt.close(); + userConn.close(); + } + + // Test alter partition location + @Test + public void testAlterPartitionLocationPrivileges() throws Exception { + String tabName = "tab1"; + String newPartitionDir = "foo"; + String tabDir = hiveServer.getProperty(HiveServerFactory.WAREHOUSE_DIR) + + "/" + tabName + "/" + newPartitionDir; + Connection userConn = null; + Statement userStmt = null; + + // create dbs + Connection adminCon = context.createConnection(ADMIN1); + Statement adminStmt = context.createStatement(adminCon); + adminStmt.execute("use default"); + adminStmt.execute("CREATE DATABASE " + DB1); + adminStmt.execute("use " + DB1); + adminStmt.execute("CREATE TABLE " + tabName + " (id int) PARTITIONED BY (dt string)"); + adminCon.close(); + + policyFile + .addRolesToGroup(USERGROUP1, "db1_all", "data_read") + .addRolesToGroup(USERGROUP2, "db1_all") + .addRolesToGroup(USERGROUP3, "db1_tab1_all", "data_read") + .addPermissionsToRole("db1_all", "server=server1->db=" + DB1) + .addPermissionsToRole("db1_tab1_all", "server=server1->db=" + DB1 + "->table=" + tabName) + .addPermissionsToRole("data_read", "server=server1->uri=" + tabDir); + writePolicyFile(policyFile); + + + // positive test: user1 has privileges to add/drop partitions and to set the partition location (data_read grants the URI) + userConn = context.createConnection(USER1_1); + userStmt = context.createStatement(userConn); + userStmt.execute("use " + DB1); + userStmt.execute("ALTER TABLE " + tabName + " ADD IF NOT EXISTS PARTITION (dt = '21-Dec-2012') " + + " LOCATION '" + tabDir + "'"); + userStmt.execute("ALTER TABLE " + tabName + " DROP PARTITION (dt = '21-Dec-2012')"); + userStmt.execute("ALTER TABLE " + tabName + " ADD PARTITION (dt = '21-Dec-2012') " + + " LOCATION '" + tabDir + "'"); + userStmt.execute( + "ALTER TABLE " + tabName + " PARTITION (dt = '21-Dec-2012') " + " SET LOCATION '" + tabDir + "'"); + userConn.close(); + + // negative test: user2 lacks the URI privilege, so adding a partition with an explicit location fails + userConn = context.createConnection(USER2_1); + userStmt = context.createStatement(userConn); + userStmt.execute("use " + DB1); + context.assertAuthzException(userStmt, + "ALTER TABLE " + tabName + " ADD PARTITION (dt = '22-Dec-2012') " + + " LOCATION '" + tabDir + "/foo'"); + // positive test, user2 can alter managed partitions + userStmt.execute("ALTER TABLE " + tabName + " ADD PARTITION (dt = '22-Dec-2012')"); + userStmt.execute("ALTER TABLE " + tabName + " DROP PARTITION (dt = '22-Dec-2012')"); + userStmt.execute("ALTER TABLE " + tabName + " ADD IF NOT EXISTS PARTITION (dt = '22-Dec-2012')"); + userStmt.execute("ALTER TABLE " + tabName + " DROP PARTITION (dt = '22-Dec-2012')"); + userConn.close(); + 
+ // positive test: user3 has privilege to add/drop partitions + userConn = context.createConnection(USER3_1); + userStmt = context.createStatement(userConn); + userStmt.execute("use " + DB1); + userStmt.execute( + "ALTER TABLE " + tabName + " ADD PARTITION (dt = '22-Dec-2012') " + + " LOCATION '" + tabDir + "/foo'"); + userStmt.execute( + "ALTER TABLE " + tabName + " DROP PARTITION (dt = '21-Dec-2012')"); + + userStmt.close(); + userConn.close(); + } + + // test alter table set location + @Test + public void testAlterTableLocationPrivileges() throws Exception { + String tabName = "tab1"; + String tabDir = hiveServer.getProperty(HiveServerFactory.WAREHOUSE_DIR) + "/" + tabName; + Connection userConn = null; + Statement userStmt = null; + + // create dbs + Connection adminCon = context.createConnection(ADMIN1); + Statement adminStmt = context.createStatement(adminCon); + adminStmt.execute("use default"); + adminStmt.execute("CREATE DATABASE " + DB1); + adminStmt.execute("use " + DB1); + adminStmt.execute("CREATE TABLE " + tabName + " (id int) PARTITIONED BY (dt string)"); + adminCon.close(); + + policyFile + .addRolesToGroup(USERGROUP1, "server1_all") + .addRolesToGroup(USERGROUP2, "db1_all, data_read") + .addPermissionsToRole("db1_all", "server=server1->db=" + DB1) + .addPermissionsToRole("data_read", "server=server1->URI=" + tabDir) + .addPermissionsToRole("server1_all", "server=server1"); + writePolicyFile(policyFile); + + // positive test: user2 has privilege to alter table set location + userConn = context.createConnection(USER2_1); + userStmt = context.createStatement(userConn); + userStmt.execute("use " + DB1); + userStmt.execute( + "ALTER TABLE " + tabName + " SET LOCATION '" + tabDir + "'"); + userConn.close(); + + // positive test: user1 has privilege to alter table set location + userConn = context.createConnection(USER1_1); + userStmt = context.createStatement(userConn); + userStmt.execute("use " + DB1); + userStmt.execute("ALTER TABLE " + tabName + " SET LOCATION '" + tabDir + "'"); + userConn.close(); + } + + // Test external table + @Test + public void testExternalTablePrivileges() throws Exception { + Connection userConn = null; + Statement userStmt = null; + + String dataDirPath = "file://" + dataDir; + String tableDir = dataDirPath + "/" + Math.random(); + + //Hive needs write permissions on this local directory + baseDir.setWritable(true, false); + dataDir.setWritable(true, false); + + // create dbs + Connection adminCon = context.createConnection(ADMIN1); + Statement adminStmt = context.createStatement(adminCon); + adminStmt.execute("use default"); + adminStmt.execute("CREATE DATABASE " + DB1); + adminStmt.close(); + adminCon.close(); + + policyFile + .addRolesToGroup(USERGROUP1, "db1_all", "data_read") + .addRolesToGroup(USERGROUP2, "db1_all") + .addPermissionsToRole("db1_all", "server=server1->db=" + DB1) + .addPermissionsToRole("data_read", "server=server1->URI=" + dataDirPath); + writePolicyFile(policyFile); + + // negative test: user2 doesn't have privilege to create external table in given path + userConn = context.createConnection(USER2_1); + userStmt = context.createStatement(userConn); + userStmt.execute("use " + DB1); + context.assertAuthzException(userStmt, + "CREATE EXTERNAL TABLE extab1(id INT) LOCATION '" + tableDir + "'"); + context.assertAuthzException(userStmt, "CREATE TABLE extab1(id INT) LOCATION '" + tableDir + "'"); + userStmt.close(); + userConn.close(); + + // positive test: user1 has privilege to create external table in given path + userConn = 
context.createConnection(USER1_1); + userStmt = context.createStatement(userConn); + userStmt.execute("use " + DB1); + userStmt.execute("CREATE EXTERNAL TABLE extab1(id INT) LOCATION '" + tableDir + "'"); + userStmt.execute("DROP TABLE extab1"); + userStmt.execute("CREATE TABLE extab1(id INT) LOCATION '" + tableDir + "'"); + userStmt.close(); + userConn.close(); + } + +}
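Note on the policy syntax exercised above: the PolicyFile builder serializes to Sentry's INI-style provider file (sentry-provider.ini). As a rough sketch, testLoadPrivileges writes something like the following, assuming DB1 resolves to db1, abbreviating the local data-file path, and using usergroup1/usergroup2 as illustrative stand-ins for the USERGROUP1/USERGROUP2 constants (the [users] section produced by the test's user-group mapping is omitted):

[groups]
usergroup1 = db1_read, db1_write, data_read
usergroup2 = db1_write

[roles]
db1_write = server=server1->db=db1->table=tab1->action=INSERT
db1_read = server=server1->db=db1->table=tab1->action=SELECT
data_read = server=server1->uri=file:///path/to/kv1.dat

A statement is authorized when some role reachable through the user's groups implies the requested privilege; the uri privilege is what gates LOAD DATA, ALTER ... LOCATION, and CREATE EXTERNAL TABLE in these tests.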
http://git-wip-us.apache.org/repos/asf/sentry/blob/bfb354f2/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/TestUserManagement.java ---------------------------------------------------------------------- diff --git a/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/TestUserManagement.java b/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/TestUserManagement.java new file mode 100644 index 0000000..02ac514 --- /dev/null +++ b/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/TestUserManagement.java @@ -0,0 +1,396 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.sentry.tests.e2e.hive; + +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; + +import java.io.File; +import java.io.FileOutputStream; +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.Statement; + +import org.apache.hadoop.mapreduce.JobContext; +import org.apache.hive.service.cli.HiveSQLException; +import org.apache.sentry.provider.file.PolicyFile; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; + +import com.google.common.io.Resources; + +public class TestUserManagement extends AbstractTestWithStaticConfiguration { + + private static final String SINGLE_TYPE_DATA_FILE_NAME = "kv1.dat"; + private static final String dbName = "db1"; + private static final String tableName = "t1"; + private static final String tableComment = "Test table"; + private File dataFile; + private PolicyFile policyFile; + + @Override + @Before + public void setup() throws Exception { + policyFile = super.setupPolicy(); + super.setup(); + dataFile = new File(dataDir, SINGLE_TYPE_DATA_FILE_NAME); + FileOutputStream to = new FileOutputStream(dataFile); + Resources.copy(Resources.getResource(SINGLE_TYPE_DATA_FILE_NAME), to); + to.close(); + } + @Override + @After + public void clearAfterPerTest() throws Exception { + if (context != null) { + context.close(); + } + } + + private void doCreateDbLoadDataDropDb(String admin, String...users) throws Exception { + doDropDb(admin); + for (String user : users) { + doCreateDb(user); + Connection connection = context.createConnection(user); + Statement statement = context.createStatement(connection); + ResultSet res = statement.executeQuery("SHOW DATABASES"); + boolean created = false; + while (res.next()) { + if (res.getString(1).equals(dbName)) { + created = true; + } + } + assertTrue("database " + dbName + " is not created", created); + doCreateTableLoadData(user); + doDropDb(user); + statement.close(); + connection.close(); + } + } + private void doDropDb(String user) throws Exception { + Connection 
connection = context.createConnection(user); + Statement statement = connection.createStatement(); + statement.execute("DROP DATABASE IF EXISTS " + dbName + " CASCADE"); + statement.close(); + connection.close(); + } + private void doCreateDb(String user) throws Exception { + Connection connection = context.createConnection(user); + Statement statement = connection.createStatement(); + statement.execute("CREATE DATABASE " + dbName); + statement.close(); + connection.close(); + } + private void doCreateTableLoadData(String user) throws Exception { + Connection connection = context.createConnection(user); + Statement statement = context.createStatement(connection); + statement.execute("USE " + dbName); + statement.execute("CREATE TABLE " + tableName + + " (under_col int comment 'the under column', value string) comment '" + + tableComment + "'"); + statement.execute("LOAD DATA LOCAL INPATH '" + dataFile.getPath() + "' into table " + tableName); + assertTrue(statement.execute("SELECT * FROM " + tableName)); + statement.close(); + connection.close(); + } + /** + * Basic sanity test + */ + @Test + public void testSanity() throws Exception { + policyFile = PolicyFile.setAdminOnServer1(ADMINGROUP); + policyFile + .addGroupsToUser("admin1", ADMINGROUP); + writePolicyFile(policyFile); + doCreateDbLoadDataDropDb("admin1", "admin1"); + } + + /** + * Tests admin privileges allow admins to create/drop dbs + **/ + @Test + public void testAdmin1() throws Exception { + policyFile = PolicyFile.setAdminOnServer1(ADMINGROUP); + policyFile + .addGroupsToUser("admin1", ADMINGROUP) + .addGroupsToUser("admin2", ADMINGROUP) + .addGroupsToUser("admin3", ADMINGROUP); + writePolicyFile(policyFile); + + doCreateDbLoadDataDropDb("admin1", "admin1", "admin2", "admin3"); + } + + /** + * Negative case: Tests that when a user is removed + * from the policy file their permissions have no effect + **/ + @Test + public void testAdmin3() throws Exception { + policyFile = PolicyFile.setAdminOnServer1(ADMINGROUP); + policyFile + .addGroupsToUser("admin1", ADMINGROUP) + .addGroupsToUser("admin2", ADMINGROUP) + .addGroupsToUser("admin3", ADMINGROUP); + writePolicyFile(policyFile); + doCreateDbLoadDataDropDb("admin1", "admin1", "admin2", "admin3"); + + // remove admin1 from admin group + policyFile + .removeGroupsFromUser("admin1", ADMINGROUP) + .write(context.getPolicyFile()); + // verify admin1 doesn't have admin privilege + Connection connection = context.createConnection("admin1"); + Statement statement = connection.createStatement(); + context.assertAuthzException(statement, "CREATE DATABASE somedb"); + statement.close(); + connection.close(); + } + + /** + * Tests that users in two groups work correctly + **/ + @Test + public void testAdmin5() throws Exception { + policyFile = new PolicyFile(); + policyFile + .addRolesToGroup("admin_group1", ADMINGROUP) + .addRolesToGroup("admin_group2", ADMINGROUP) + .addPermissionsToRole(ADMINGROUP, "server=server1") + .addGroupsToUser("admin1", "admin_group1", "admin_group2") + .addGroupsToUser("admin2", "admin_group1", "admin_group2") + .addGroupsToUser("admin3", "admin_group1", "admin_group2"); + writePolicyFile(policyFile); + doCreateDbLoadDataDropDb("admin1", "admin1", "admin2", "admin3"); + } + + /** + * Tests that admin-group privileges do not leak into a non-admin group + **/ + @Test + public void testAdmin6() throws Exception { + policyFile = PolicyFile.setAdminOnServer1(ADMINGROUP); + policyFile + .addGroupsToUser("admin1", ADMINGROUP) + .addRolesToGroup("group1", "non_admin_role") + 
.addPermissionsToRole("non_admin_role", "server=server1->db=" + dbName) + .addGroupsToUser("user1", "group1"); + writePolicyFile(policyFile); + + doCreateDbLoadDataDropDb("admin1", "admin1"); + Connection connection = context.createConnection("user1"); + Statement statement = connection.createStatement(); + context.assertAuthzException(statement, "CREATE DATABASE " + dbName); + statement.close(); + connection.close(); + } + + /** + * Tests that user with two roles the most powerful role takes effect + **/ + @Test + public void testGroup2() throws Exception { + policyFile = new PolicyFile(); + policyFile + .addRolesToGroup("group1", ADMINGROUP, "analytics") + .addPermissionsToRole(ADMINGROUP, "server=server1") + .addPermissionsToRole("analytics", "server=server1->db=" + dbName) + .addGroupsToUser("user1", "group1") + .addGroupsToUser("user2", "group1") + .addGroupsToUser("user3", "group1"); + writePolicyFile(policyFile); + doCreateDbLoadDataDropDb("user1", "user1", "user2", "user3"); + } + /** + * Tests that user without uri privilege can create table but not load data + **/ + @Test + public void testGroup4() throws Exception { + policyFile = PolicyFile.setAdminOnServer1(ADMINGROUP); + policyFile + .addGroupsToUser("admin1", ADMINGROUP) + .addRolesToGroup("group1", "non_admin_role", "load_data") + .addPermissionsToRole("non_admin_role", "server=server1->db=" + dbName) + .addGroupsToUser("user1", "group1") + .addGroupsToUser("user2", "group1") + .addGroupsToUser("user3", "group1"); + writePolicyFile(policyFile); + + doDropDb("admin1"); + for(String user : new String[]{"user1", "user2", "user3"}) { + doCreateDb("admin1"); + Connection connection = context.createConnection(user); + Statement statement = context.createStatement(connection); + statement.execute("USE " + dbName); + statement.execute("CREATE TABLE " + tableName + + " (under_col int comment 'the under column', value string) comment '" + + tableComment + "'"); + context.assertAuthzException(statement, + "LOAD DATA LOCAL INPATH '" + dataFile.getPath() + "' into table " + tableName); + assertTrue(statement.execute("SELECT * FROM " + tableName)); + statement.close(); + connection.close(); + doDropDb("admin1"); + } + } + /** + * Tests users can have same name as groups + **/ + @Test + public void testGroup5() throws Exception { + + policyFile = PolicyFile.setAdminOnServer1(ADMINGROUP); + policyFile + .addGroupsToUser("admin1", ADMINGROUP) + .addRolesToGroup("group1", "non_admin_role", "load_data") + .addPermissionsToRole("non_admin_role", "server=server1->db=" + dbName) + .addPermissionsToRole("load_data", "server=server1->URI=file://" + dataFile.getPath()) + .addGroupsToUser("group1", "group1") + .addGroupsToUser("user2", "group1") + .addGroupsToUser("user3", "group1"); + writePolicyFile(policyFile); + + doDropDb("admin1"); + for(String user : new String[]{"group1", "user2", "user3"}) { + doCreateDb("admin1"); + doCreateTableLoadData(user); + doDropDb("admin1"); + } + } + + /** + * Tests that group names with special characters are handled correctly + **/ + @Test + public void testGroup6() throws Exception { + policyFile = PolicyFile.setAdminOnServer1(ADMINGROUP); + policyFile + .addGroupsToUser("admin1", ADMINGROUP) + .addRolesToGroup("group1~!@#$%^&*()+-", "analytics", "load_data") + .addPermissionsToRole("analytics", "server=server1->db=" + dbName) + .addPermissionsToRole("load_data", "server=server1->URI=file://" + dataFile.getPath()) + .addGroupsToUser("user1", "group1~!@#$%^&*()+-") + .addGroupsToUser("user2", 
"group1~!@#$%^&*()+-") + .addGroupsToUser("user3", "group1~!@#$%^&*()+-"); + writePolicyFile(policyFile); + + doDropDb("admin1"); + for(String user : new String[]{"user1", "user2", "user3"}) { + doCreateDb("admin1"); + doCreateTableLoadData(user); + doDropDb("admin1"); + } + } + + /** + * Tests that user names with special characters are handled correctly + **/ + @Test + public void testGroup7() throws Exception { + policyFile = new PolicyFile(); + policyFile + .addRolesToGroup("group1", ADMINGROUP) + .addPermissionsToRole(ADMINGROUP, "server=server1") + .addGroupsToUser("user1~!#$%^&*()+-", "group1") + .addGroupsToUser("user2", "group1") + .addGroupsToUser("user3", "group1"); + writePolicyFile(policyFile); + doCreateDbLoadDataDropDb("user1~!#$%^&*()+-", "user1~!#$%^&*()+-", "user2", "user3"); + } + + /** + * Tests that users with no privileges cannot list any tables + **/ + @Test + public void testGroup8() throws Exception { + policyFile = PolicyFile.setAdminOnServer1(ADMINGROUP); + policyFile + .addGroupsToUser("admin1", ADMINGROUP) + .addRolesToGroup("group1", "analytics") + .addGroupsToUser("user1", "group1") + .addGroupsToUser("user2", "group1") + .addGroupsToUser("user3", "group1"); + writePolicyFile(policyFile); + + Connection connection = context.createConnection("admin1"); + Statement statement = connection.createStatement(); + statement.execute("DROP DATABASE IF EXISTS db1 CASCADE"); + statement.execute("CREATE DATABASE db1"); + statement.execute("USE db1"); + statement.execute("CREATE TABLE t1 (under_col int, value string)"); + statement.close(); + connection.close(); + String[] users = { "user1", "user2", "user3" }; + for (String user : users) { + connection = context.createConnection(user); + statement = context.createStatement(connection); + assertFalse("No results should be returned", + statement.executeQuery("SHOW TABLES").next()); + statement.close(); + connection.close(); + } + } + + /** + * Tests that users without group information will cause the configuration exception + **/ + @Test + public void testGroup9() throws Exception { + policyFile = PolicyFile.setAdminOnServer1(ADMINGROUP); + policyFile.addGroupsToUser("admin1", ADMINGROUP); + writePolicyFile(policyFile); + + Connection connection = context.createConnection("admin1"); + Statement statement = connection.createStatement(); + statement.execute("DROP DATABASE IF EXISTS db1 CASCADE"); + statement.execute("CREATE DATABASE db1"); + statement.execute("USE db1"); + statement.execute("CREATE TABLE t1 (under_col int)"); + statement.close(); + connection.close(); + + // user1 hasn't any group + connection = context.createConnection("user1"); + statement = context.createStatement(connection); + // for any sql need to be authorized, exception will be thrown if the uer hasn't any group + // information + try { + statement.execute("CREATE TABLE db1.t1 (under_col int, value string)"); + fail("User without group configuration, SentryGroupNotFoundException should be thrown "); + } catch (HiveSQLException hse) { + assertTrue(hse.getMessage().indexOf("SentryGroupNotFoundException") >= 0); + } + try { + statement.execute("SELECT under_col from db1.t1"); + fail("User without group configuration, SentryGroupNotFoundException should be thrown "); + } catch (HiveSQLException hse) { + assertTrue(hse.getMessage().indexOf("SentryGroupNotFoundException") >= 0); + } + statement.close(); + connection.close(); + } + + @Test + public void testMrAclsSetting() throws Exception { + Connection connection = context.createConnection("admin1"); + 
context + .verifySessionConf(connection, JobContext.JOB_ACL_VIEW_JOB, "admin1"); + context + .verifySessionConf(connection, JobContext.JOB_ACL_MODIFY_JOB, "admin1"); + } +} http://git-wip-us.apache.org/repos/asf/sentry/blob/bfb354f2/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/TestViewPrivileges.java ---------------------------------------------------------------------- diff --git a/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/TestViewPrivileges.java b/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/TestViewPrivileges.java new file mode 100644 index 0000000..32da2ae --- /dev/null +++ b/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/TestViewPrivileges.java @@ -0,0 +1,138 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.sentry.tests.e2e.hive; + +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.CoreMatchers.notNullValue; + +import java.io.File; +import java.io.FileOutputStream; +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.Map; + +import org.junit.Assert; + +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.sentry.provider.file.PolicyFile; +import org.junit.AfterClass; +import org.junit.Assume; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +import com.google.common.collect.Maps; +import com.google.common.io.Resources; + +public class TestViewPrivileges extends AbstractTestWithHiveServer { + protected static final String SERVER_HOST = "localhost"; + + private static Context context; + private static Map<String, String> properties; + private PolicyFile policyFile; + + private final String SINGLE_TYPE_DATA_FILE_NAME = "kv1.dat"; + + @BeforeClass + public static void setUp() throws Exception { + properties = Maps.newHashMap(); + properties.put(HiveConf.ConfVars.METASTORE_AUTO_CREATE_ALL.varname, "true"); + context = createContext(properties); + } + + @AfterClass + public static void tearDown() throws Exception { + if(context != null) { + context.close(); + } + } + + @Before + public void setupPolicyFile() throws Exception { + policyFile = PolicyFile.setAdminOnServer1(ADMINGROUP); + } + + @Test + public void testPartitioned() throws Exception { + // copy data file to test dir + File dataDir = context.getDataDir(); + File dataFile = new File(dataDir, SINGLE_TYPE_DATA_FILE_NAME); + FileOutputStream to = new FileOutputStream(dataFile); + Resources.copy(Resources.getResource(SINGLE_TYPE_DATA_FILE_NAME), to); + to.close(); + + String viewName = "view1"; + String db = "db1"; + String tabName = "tab1"; + policyFile + 
.addPermissionsToRole("view", "server=server1->db=" + db + "->table=" + viewName) + .addRolesToGroup(USERGROUP1, "view") + .setUserGroupMapping(StaticUserGroup.getStaticMapping()); + policyFile.write(context.getPolicyFile()); + + //admin creates a view + Connection conn = context.createConnection(ADMIN1); + Statement stmt = context.createStatement(conn); + stmt.execute("DROP DATABASE IF EXISTS " + db + " CASCADE"); + stmt.execute("CREATE DATABASE " + db); + + stmt.execute("use " + db); + stmt.execute("create table " + tabName + " (id int) partitioned by (part string)"); + stmt.execute("load data local inpath '" + dataFile + "' into table " + tabName + " PARTITION (part=\"a\")"); + stmt.execute("load data local inpath '" + dataFile + "' into table " + tabName + " PARTITION (part=\"b\")"); + ResultSet res = stmt.executeQuery("select count(*) from " + tabName); + Assert.assertThat(res, notNullValue()); + while(res.next()) { + Assume.assumeTrue(res.getInt(1) == Integer.valueOf(1000)); + } + stmt.execute("create view " + viewName + " as select * from " + tabName + " where id<100"); + res = stmt.executeQuery("select count(*) from " + viewName); + Assert.assertThat(res, notNullValue()); + int rowsInView = 0; + while(res.next()) { + rowsInView = res.getInt(1); + } + stmt.close(); + conn.close(); + + Connection userConn = context.createConnection(USER1_1); + Statement userStmt = context.createStatement(userConn); + userStmt.execute("use " + db); + res = userStmt.executeQuery("select count(*) from " + viewName); + Assert.assertThat(res, notNullValue()); + while(res.next()) { + Assert.assertThat(res.getInt(1), is(rowsInView)); + } + userStmt.close(); + userConn.close(); + + // user2 hasn't the privilege for the view + userConn = context.createConnection(USER2_1); + userStmt = context.createStatement(userConn); + try { + userStmt.executeQuery("select count(*) from " + viewName); + Assert.fail("Expected SQL exception"); + } catch (SQLException e) { + // ignore the exception + } + userStmt.close(); + userConn.close(); + } +} http://git-wip-us.apache.org/repos/asf/sentry/blob/bfb354f2/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/fs/AbstractDFS.java ---------------------------------------------------------------------- diff --git a/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/fs/AbstractDFS.java b/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/fs/AbstractDFS.java new file mode 100644 index 0000000..32aabb4 --- /dev/null +++ b/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/fs/AbstractDFS.java @@ -0,0 +1,87 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.sentry.tests.e2e.hive.fs; + +import org.junit.Assert; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.File; +import java.io.IOException; + +public abstract class AbstractDFS implements DFS{ + private static final Logger LOGGER = LoggerFactory + .getLogger(AbstractDFS.class); + protected static FileSystem fileSystem; + protected static Path dfsBaseDir; + public Path sentryDir; + + + @Override + public FileSystem getFileSystem(){ + return fileSystem; + } + + @Override + public void tearDown() throws Exception { + cleanBaseDir(); + } + + @Override + public void createBaseDir() throws Exception { + Assert.assertTrue(dfsBaseDir.toString(), fileSystem.delete(dfsBaseDir, true)); + Assert.assertTrue(dfsBaseDir.toString(), fileSystem.mkdirs(dfsBaseDir)); + } + + @Override + public Path assertCreateDir(String path) throws Exception{ + return assertCreateDfsDir( new Path(dfsBaseDir + path)); + } + + @Override + public Path getBaseDir(){ + return dfsBaseDir; + } + + @Override + public void writePolicyFile(File srcFile) throws IOException { + String policyFileName = srcFile.getName(); + Path destPath = new Path(sentryDir, policyFileName); + fileSystem.copyFromLocalFile(true, true, new Path(srcFile.getAbsolutePath()), destPath); + LOGGER.info("Copied file to HDFS: " + destPath.toString()); + } + + protected void cleanBaseDir() throws Exception { + cleanDir(dfsBaseDir); + } + + protected void cleanDir(Path dir) throws Exception { + if(dir != null) { + Assert.assertTrue(dir.toString(), fileSystem.delete(dir, true)); + } + } + + protected Path assertCreateDfsDir(Path dir) throws IOException { + if(!fileSystem.isDirectory(dir)) { + Assert.assertTrue("Failed creating " + dir, fileSystem.mkdirs(dir)); + } + return dir; + } + +} http://git-wip-us.apache.org/repos/asf/sentry/blob/bfb354f2/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/fs/ClusterDFS.java ---------------------------------------------------------------------- diff --git a/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/fs/ClusterDFS.java b/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/fs/ClusterDFS.java new file mode 100644 index 0000000..06415cb --- /dev/null +++ b/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/fs/ClusterDFS.java @@ -0,0 +1,69 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.sentry.tests.e2e.hive.fs; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.security.UserGroupInformation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.security.PrivilegedExceptionAction; +import java.util.Random; + +public class ClusterDFS extends AbstractDFS{ + private static final Logger LOGGER = LoggerFactory + .getLogger(ClusterDFS.class); + public static final String TEST_USER = "sentry.e2etest.hive.test.user"; + private static final String testUser = System.getProperty(TEST_USER, "hive"); + private static final String KEYTAB_LOCATION = System.getProperty("sentry.e2e.hive.keytabs.location"); + private UserGroupInformation ugi; + + ClusterDFS() throws Exception{ + ugi = UserGroupInformation.loginUserFromKeytabAndReturnUGI(testUser, KEYTAB_LOCATION + "/" + testUser + ".keytab"); + fileSystem = getFS(ugi); + LOGGER.info("File system uri for policy files: " + fileSystem.getUri()); + LOGGER.info("Creating basedir as user : " + testUser); + String policyDir = System.getProperty("sentry.e2etest.hive.policy.location", "/user/hive/sentry"); + sentryDir = super.assertCreateDfsDir(new Path(fileSystem.getUri() + policyDir)); + dfsBaseDir = super.assertCreateDfsDir(new Path(fileSystem.getUri() + "/tmp/" + (new Random()).nextInt())); + } + + @Override + public Path assertCreateDir(String path) throws Exception{ + if(path.startsWith("/")){ + return super.assertCreateDfsDir(new Path(path)); + }else { + return super.assertCreateDfsDir( new Path(dfsBaseDir + path)); + } + } + + @Override + protected void cleanBaseDir() throws Exception { + super.cleanBaseDir(); + super.cleanDir(sentryDir); + } + private FileSystem getFS(UserGroupInformation ugi) throws Exception { + return ugi.doAs(new PrivilegedExceptionAction<FileSystem>() { + public FileSystem run() throws Exception { + Configuration conf = new Configuration(); + return FileSystem.get(conf); + } + }); + } +} \ No newline at end of file http://git-wip-us.apache.org/repos/asf/sentry/blob/bfb354f2/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/fs/DFS.java ---------------------------------------------------------------------- diff --git a/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/fs/DFS.java b/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/fs/DFS.java new file mode 100644 index 0000000..67ba338 --- /dev/null +++ b/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/fs/DFS.java @@ -0,0 +1,32 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.sentry.tests.e2e.hive.fs; + +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; + +import java.io.File; +import java.io.IOException; + +public interface DFS { + FileSystem getFileSystem(); + void tearDown() throws Exception; + Path assertCreateDir(String dir) throws Exception; + Path getBaseDir(); + void createBaseDir() throws Exception; + void writePolicyFile(File srcFile) throws IOException; +} http://git-wip-us.apache.org/repos/asf/sentry/blob/bfb354f2/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/fs/DFSFactory.java ---------------------------------------------------------------------- diff --git a/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/fs/DFSFactory.java b/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/fs/DFSFactory.java new file mode 100644 index 0000000..e1881b4 --- /dev/null +++ b/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/fs/DFSFactory.java @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.sentry.tests.e2e.hive.fs; + +import java.io.File; + +import com.google.common.annotations.VisibleForTesting; + +public class DFSFactory { + public static final String FS_TYPE = "sentry.e2etest.DFSType"; + + public static DFS create(String dfsType, File baseDir, + String serverType) throws Exception { + DFSType type; + if(dfsType!=null) { + type = DFSType.valueOf(dfsType.trim()); + }else { + type = DFSType.MiniDFS; + } + switch (type) { + case MiniDFS: + return new MiniDFS(baseDir, serverType); + case ClusterDFS: + return new ClusterDFS(); + default: + throw new UnsupportedOperationException(type.name()); + } + } + + @VisibleForTesting + public static enum DFSType { + MiniDFS, + ClusterDFS; + }; +} http://git-wip-us.apache.org/repos/asf/sentry/blob/bfb354f2/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/fs/MiniDFS.java ---------------------------------------------------------------------- diff --git a/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/fs/MiniDFS.java b/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/fs/MiniDFS.java new file mode 100644 index 0000000..77af432 --- /dev/null +++ b/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/fs/MiniDFS.java @@ -0,0 +1,94 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.sentry.tests.e2e.hive.fs; + +import java.io.File; +import java.io.IOException; +import java.util.List; + +import org.junit.Assert; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.hdfs.MiniDFSCluster; +import org.apache.hadoop.security.GroupMappingServiceProvider; +import org.apache.hadoop.security.Groups; +import org.apache.sentry.tests.e2e.hive.hiveserver.HiveServerFactory.HiveServer2Type; + +import com.google.common.collect.Lists; + +public class MiniDFS extends AbstractDFS { + // mock user-group mapping that maps each user to a group of the same name + public static class PseudoGroupMappingService implements + GroupMappingServiceProvider { + + @Override + public List<String> getGroups(String user) { + return Lists.newArrayList(user, System.getProperty("user.name")); + } + + @Override + public void cacheGroupsRefresh() throws IOException { + // no-op + } + + @Override + public void cacheGroupsAdd(List<String> groups) throws IOException { + // no-op + } + } + + private static MiniDFSCluster dfsCluster; + + MiniDFS(File baseDir, String serverType) throws Exception { + Configuration conf = new Configuration(); + if (HiveServer2Type.InternalMetastore.name().equalsIgnoreCase(serverType)) { + // set the test group mapping that maps a user to a group of the same name + conf.set("hadoop.security.group.mapping", + "org.apache.sentry.tests.e2e.hive.fs.MiniDFS$PseudoGroupMappingService"); + // set umask so the metastore test client can create tables in the warehouse dir + conf.set("fs.permissions.umask-mode", "000"); + Groups.getUserToGroupsMappingServiceWithLoadedConfiguration(conf); + } + File dfsDir = assertCreateDir(new File(baseDir, "dfs")); + conf.set(MiniDFSCluster.HDFS_MINIDFS_BASEDIR, dfsDir.getPath()); + conf.set("hadoop.security.group.mapping", + MiniDFS.PseudoGroupMappingService.class.getName()); + Configuration.addDefaultResource("test.xml"); + dfsCluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build(); + fileSystem = dfsCluster.getFileSystem(); + String policyDir = System.getProperty("sentry.e2etest.hive.policy.location", "/user/hive/sentry"); + sentryDir = super.assertCreateDfsDir(new Path(fileSystem.getUri() + policyDir)); + dfsBaseDir = assertCreateDfsDir(new Path(new Path(fileSystem.getUri()), "/base")); + } + + @Override + public void tearDown() throws Exception { + if(dfsCluster != null) { + dfsCluster.shutdown(); + dfsCluster = null; + } + } + + //Utilities + private static File assertCreateDir(File dir) { + if(!dir.isDirectory()) { + Assert.assertTrue("Failed creating " + dir, dir.mkdirs()); + } + return dir; + } +} http://git-wip-us.apache.org/repos/asf/sentry/blob/bfb354f2/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/AbstractHiveServer.java ---------------------------------------------------------------------- diff --git 
a/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/AbstractHiveServer.java b/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/AbstractHiveServer.java new file mode 100644 index 0000000..dda7dba --- /dev/null +++ b/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/AbstractHiveServer.java @@ -0,0 +1,95 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.sentry.tests.e2e.hive.hiveserver; + +import java.sql.Connection; +import java.sql.DriverManager; +import java.sql.SQLException; +import java.util.concurrent.TimeoutException; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.base.Strings; + +public abstract class AbstractHiveServer implements HiveServer { + private static final Logger LOGGER = LoggerFactory.getLogger(AbstractHiveServer.class); + + + private static final String LINK_FAILURE_SQL_STATE = "08S01"; + + private final Configuration configuration; + private final String hostname; + private final int port; + + public AbstractHiveServer(Configuration configuration, String hostname, + int port) { + this.configuration = configuration; + this.hostname = hostname; + this.port = port; + LOGGER.info("Hive Server2 configured on host: " + hostname + " port:" + port); + } + + @Override + public String getProperty(String key) { + return configuration.get(key); + } + + @Override + public String getURL() { + return "jdbc:hive2://" + hostname + ":" + port + "/default"; + } + + public Connection createConnection(String user, String password) throws Exception{ + String url = getURL(); + DriverManager.setLoginTimeout(0); + Connection connection = DriverManager.getConnection(url, user, password); + return connection; + } + + protected static String getHostname(HiveConf hiveConf) { + return hiveConf.get(ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST.toString(), "localhost").trim(); + } + protected static int getPort(HiveConf hiveConf) { + return Integer.parseInt(hiveConf.get(ConfVars.HIVE_SERVER2_THRIFT_PORT.toString(), "10000").trim()); + } + protected static void waitForStartup(HiveServer hiveServer) throws Exception { + int waitTime = 0; + long startupTimeout = 1000L * 10L; + do { + Thread.sleep(500L); + waitTime += 500L; + if (waitTime > startupTimeout) { + throw new TimeoutException("Couldn't access new HiveServer: " + hiveServer.getURL()); + } + try { + DriverManager.setLoginTimeout(30); + Connection connection = DriverManager.getConnection(hiveServer.getURL(), "foo", "bar"); + connection.close(); + break; + } catch (SQLException 
e) { + String state = Strings.nullToEmpty(e.getSQLState()).trim(); + if (!state.equalsIgnoreCase(LINK_FAILURE_SQL_STATE)) { + throw e; + } + } + } while (true); + } +} http://git-wip-us.apache.org/repos/asf/sentry/blob/bfb354f2/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/EmbeddedHiveServer.java ---------------------------------------------------------------------- diff --git a/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/EmbeddedHiveServer.java b/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/EmbeddedHiveServer.java new file mode 100644 index 0000000..52ba09e --- /dev/null +++ b/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/EmbeddedHiveServer.java @@ -0,0 +1,60 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.sentry.tests.e2e.hive.hiveserver; + +import java.sql.Connection; +import java.sql.DriverManager; + +import org.apache.hadoop.hive.metastore.HiveMetaStore; +import org.fest.reflect.core.Reflection; + +public class EmbeddedHiveServer implements HiveServer { + + @Override + public void start() { + // Fix for ACCESS-148. Resets a static field + // so the default database is created even + // though it has been created before in this JVM + Reflection.staticField("createDefaultDB") + .ofType(boolean.class) + .in(HiveMetaStore.HMSHandler.class) + .set(false); + } + + public Connection createConnection(String user, String password) throws Exception{ + String url = getURL(); + DriverManager.setLoginTimeout(30); + Connection connection = DriverManager.getConnection(url, user, password); + return connection; + } + + @Override + public void shutdown() { + + } + + @Override + public String getURL() { + return "jdbc:hive2://"; + } + + @Override + public String getProperty(String key) { + throw new UnsupportedOperationException(); + } +} http://git-wip-us.apache.org/repos/asf/sentry/blob/bfb354f2/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/ExternalHiveServer.java ---------------------------------------------------------------------- diff --git a/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/ExternalHiveServer.java b/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/ExternalHiveServer.java new file mode 100644 index 0000000..88edc08 --- /dev/null +++ b/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/ExternalHiveServer.java @@ -0,0 +1,124 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.sentry.tests.e2e.hive.hiveserver; + +import java.io.BufferedReader; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.util.List; + +import org.apache.hadoop.hive.conf.HiveConf; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.base.Charsets; +import com.google.common.base.Joiner; +import com.google.common.base.Preconditions; +import com.google.common.base.Splitter; +import com.google.common.base.Strings; +import com.google.common.collect.Lists; +import com.google.common.io.Files; + + +public class ExternalHiveServer extends AbstractHiveServer { + private static final Logger LOGGER = LoggerFactory + .getLogger(ExternalHiveServer.class); + private final File confDir; + private final File logDir; + private Process process; + + public ExternalHiveServer(HiveConf hiveConf, File confDir, File logDir) throws Exception { + super(hiveConf, getHostname(hiveConf), getPort(hiveConf)); + this.confDir = confDir; + this.logDir = logDir; + } + + + @Override + public synchronized void start() throws Exception { + String hiveCommand = System.getProperty("hive.bin.path", "./target/hive/bin/hive"); + String hadoopHome = System.getProperty("hadoop.home", "./target/hadoop"); + String hadoopClasspath = getHadoopClasspath(); + String command = "export "; + command += String.format("HIVE_CONF_DIR=\"%s\" HADOOP_HOME=\"%s\" ", confDir.getPath(), hadoopHome); + command += String.format("HADOOP_CLASSPATH=\"%s:%s\" ", confDir.getPath(), hadoopClasspath); + command += "HADOOP_CLIENT_OPTS=\"-Dhive.log.dir=./target/\""; + command += "; "; + command += String.format("%s --service hiveserver2 >%s/hs2.out 2>&1 & echo $! > %s/hs2.pid", + hiveCommand, logDir.getPath(), logDir.getPath()); + LOGGER.info("Executing " + command); + process = Runtime.getRuntime(). + exec(new String[]{"/bin/sh", "-c", command}); + waitForStartup(this); + } + + @Override + public synchronized void shutdown() throws Exception { + if(process != null) { + process.destroy(); + process = null; + String pid = Strings.nullToEmpty(Files.readFirstLine(new File(logDir, "hs2.pid"), Charsets.UTF_8)).trim(); + if(!pid.isEmpty()) { + LOGGER.info("Killing " + pid); + Process killCommand = Runtime.getRuntime(). 
+ exec(new String[]{"/bin/sh", "-c", "kill " + pid}); + // TODO this isn't strictly correct but kill won't output much data + String error = read(killCommand.getErrorStream()); + String output = read(killCommand.getInputStream()); + LOGGER.info("Kill exit code " + killCommand.waitFor() + + ", output = '" + output + "', error = '" + error + "'"); + } + } + } + + private String read(InputStream is) throws IOException { + BufferedReader reader = new BufferedReader(new InputStreamReader(is)); + StringBuffer buffer = new StringBuffer(); + try { + String line; + while((line = reader.readLine()) != null) { + buffer.append(line); + } + return buffer.toString(); + } finally { + reader.close(); + } + + } + + private String getHadoopClasspath() { + List<String> result = Lists.newArrayList(); + String clazzPath = Preconditions.checkNotNull(System.getProperty("java.class.path"), "java.class.path"); + String sep = Preconditions.checkNotNull(System.getProperty("path.separator"), "path.separator"); + for(String item : Splitter.on(sep).omitEmptyStrings().trimResults().split(clazzPath)) { + if(item.endsWith("/sentry-tests/target/classes") || + item.endsWith("/sentry-tests/target/test-classes")) { + result.add(item); + } else { + File clazzPathItem = new File(item); + String fileName = clazzPathItem.getName(); + if(clazzPathItem.isFile() && fileName.startsWith("sentry-") && fileName.endsWith(".jar")) { + result.add(item); + } + } + } + return Joiner.on(sep).join(result); + } + +} http://git-wip-us.apache.org/repos/asf/sentry/blob/bfb354f2/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/HiveServer.java ---------------------------------------------------------------------- diff --git a/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/HiveServer.java b/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/HiveServer.java new file mode 100644 index 0000000..175e84c --- /dev/null +++ b/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/HiveServer.java @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.sentry.tests.e2e.hive.hiveserver; + +import java.sql.Connection; + +public interface HiveServer { + + void start() throws Exception; + + void shutdown() throws Exception; + + String getURL(); + + String getProperty(String key); + + Connection createConnection(String user, String password) throws Exception; + +} http://git-wip-us.apache.org/repos/asf/sentry/blob/bfb354f2/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/HiveServerFactory.java ---------------------------------------------------------------------- diff --git a/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/HiveServerFactory.java b/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/HiveServerFactory.java new file mode 100644 index 0000000..3afde62 --- /dev/null +++ b/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/HiveServerFactory.java @@ -0,0 +1,296 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.sentry.tests.e2e.hive.hiveserver; + +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.OutputStream; +import java.net.ServerSocket; +import java.net.URL; +import java.nio.file.FileSystems; +import java.util.Map; + +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.Path; +import org.apache.hadoop.fs.permission.FsPermission; +import org.apache.hadoop.hdfs.DistributedFileSystem; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import org.apache.sentry.binding.hive.conf.HiveAuthzConf; +import org.apache.sentry.binding.hive.conf.HiveAuthzConf.AuthzConfVars; +import org.apache.sentry.binding.hive.v2.HiveAuthzBindingSessionHookV2; +import org.apache.sentry.binding.hive.v2.SentryAuthorizerFactory; +import org.apache.sentry.binding.hive.v2.metastore.AuthorizingObjectStoreV2; +import org.apache.sentry.binding.hive.v2.metastore.MetastoreAuthzBindingV2; +import org.apache.sentry.provider.file.LocalGroupResourceAuthorizationProvider; +import org.fest.reflect.core.Reflection; +import org.junit.Assert; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.annotations.VisibleForTesting; +import com.google.common.io.Resources; + +public class HiveServerFactory { + private static final Logger LOGGER = LoggerFactory + .getLogger(HiveServerFactory.class); + private static final String HIVE_DRIVER_NAME = "org.apache.hive.jdbc.HiveDriver"; + private static final String DERBY_DRIVER_NAME = "org.apache.derby.jdbc.EmbeddedDriver"; + public static final String HIVESERVER2_TYPE = "sentry.e2etest.hiveServer2Type"; + public static final String KEEP_BASEDIR = "sentry.e2etest.keepBaseDir"; + public static final String METASTORE_CONNECTION_URL = HiveConf.ConfVars.METASTORECONNECTURLKEY.varname; + public static final String WAREHOUSE_DIR = HiveConf.ConfVars.METASTOREWAREHOUSE.varname; + public static final String AUTHZ_PROVIDER = HiveAuthzConf.AuthzConfVars.AUTHZ_PROVIDER.getVar(); + public static final String AUTHZ_PROVIDER_RESOURCE = HiveAuthzConf.AuthzConfVars.AUTHZ_PROVIDER_RESOURCE.getVar(); + public static final String AUTHZ_PROVIDER_BACKEND = HiveAuthzConf.AuthzConfVars.AUTHZ_PROVIDER_BACKEND.getVar(); + public static final String AUTHZ_PROVIDER_FILENAME = "sentry-provider.ini"; + public static final String AUTHZ_SERVER_NAME = HiveAuthzConf.AuthzConfVars.AUTHZ_SERVER_NAME.getVar(); + public static final String ACCESS_TESTING_MODE = HiveAuthzConf.AuthzConfVars.SENTRY_TESTING_MODE.getVar(); + public static final String HS2_PORT = ConfVars.HIVE_SERVER2_THRIFT_PORT.toString(); + public static final String SUPPORT_CONCURRENCY = HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY.varname; + public static final String HADOOPBIN = ConfVars.HADOOPBIN.toString(); + public static final String DEFAULT_AUTHZ_SERVER_NAME = "server1"; + public static final String HIVESERVER2_IMPERSONATION = "hive.server2.enable.doAs"; + public static final String METASTORE_URI = HiveConf.ConfVars.METASTOREURIS.varname; + public static final String METASTORE_HOOK = HiveConf.ConfVars.METASTORE_PRE_EVENT_LISTENERS.varname; + public static final String METASTORE_SETUGI = HiveConf.ConfVars.METASTORE_EXECUTE_SET_UGI.varname; + public static final String METASTORE_BYPASS = AuthzConfVars.AUTHZ_METASTORE_SERVICE_USERS.getVar(); + public static final String METASTORE_CLIENT_TIMEOUT = HiveConf.ConfVars.METASTORE_CLIENT_SOCKET_TIMEOUT.varname; + public static final String 
METASTORE_RAW_STORE_IMPL = HiveConf.ConfVars.METASTORE_RAW_STORE_IMPL.varname; + + static { + try { + Assert.assertNotNull(DERBY_DRIVER_NAME + " is null", Class.forName(DERBY_DRIVER_NAME)); + Assert.assertNotNull(HIVE_DRIVER_NAME + " is null", Class.forName(HIVE_DRIVER_NAME)); + } catch (ClassNotFoundException e) { + throw new RuntimeException(e); + } + } + + public static HiveServer create(Map<String, String> properties, + File baseDir, File confDir, File logDir, String policyFile, + FileSystem fileSystem) throws Exception { + String type = properties.get(HIVESERVER2_TYPE); + if(type == null) { + type = System.getProperty(HIVESERVER2_TYPE); + } + if(type == null) { + type = HiveServer2Type.InternalHiveServer2.name(); + } + return create(HiveServer2Type.valueOf(type.trim()), properties, + baseDir, confDir, logDir, policyFile, fileSystem); + } + public static HiveServer create(HiveServer2Type type, + Map<String, String> properties, File baseDir, File confDir, + File logDir, String policyFile, FileSystem fileSystem) throws Exception { + + if(type.equals(HiveServer2Type.UnmanagedHiveServer2)){ + LOGGER.info("Creating UnmanagedHiveServer"); + return new UnmanagedHiveServer(); + } + if(!properties.containsKey(WAREHOUSE_DIR)) { + LOGGER.info("fileSystem " + fileSystem.getClass().getSimpleName()); + if (fileSystem instanceof DistributedFileSystem) { + @SuppressWarnings("static-access") + String dfsUri = fileSystem.getDefaultUri(fileSystem.getConf()).toString(); + LOGGER.info("dfsUri " + dfsUri); + properties.put(WAREHOUSE_DIR, dfsUri + "/data"); + fileSystem.mkdirs(new Path("/data/"), new FsPermission((short) 0777)); + } else { + properties.put(WAREHOUSE_DIR, new File(baseDir, "warehouse").getPath()); + fileSystem.mkdirs(new Path("/", "warehouse"), new FsPermission((short) 0777)); + } + } + Boolean policyOnHDFS = Boolean.valueOf(System.getProperty("sentry.e2etest.policyonhdfs", "false")); + if (policyOnHDFS) { + // Initialize "hive.exec.scratchdir"; according to the description of + // "hive.exec.scratchdir", the permission should be (733). + // <description>HDFS root scratch dir for Hive jobs which gets created with write + // all (733) permission.
For each connecting user, an HDFS scratch dir: + // ${hive.exec.scratchdir}/<username> is created, + // with ${hive.scratch.dir.permission}.</description> + fileSystem.mkdirs(new Path("/tmp/hive/")); + fileSystem.setPermission(new Path("/tmp/hive/"), new FsPermission((short) 0733)); + } else { + LOGGER.info("Setting a readable path to hive.exec.scratchdir"); + properties.put("hive.exec.scratchdir", new File(baseDir, "scratchdir").getPath()); + } + if(!properties.containsKey(METASTORE_CONNECTION_URL)) { + properties.put(METASTORE_CONNECTION_URL, + String.format("jdbc:derby:;databaseName=%s;create=true", + new File(baseDir, "metastore").getPath())); + } + if(!properties.containsKey(ACCESS_TESTING_MODE)) { + properties.put(ACCESS_TESTING_MODE, "true"); + } + if(!properties.containsKey(AUTHZ_PROVIDER_RESOURCE)) { + LOGGER.info("Policy File location: " + policyFile); + properties.put(AUTHZ_PROVIDER_RESOURCE, policyFile); + } + if(!properties.containsKey(AUTHZ_PROVIDER)) { + properties.put(AUTHZ_PROVIDER, LocalGroupResourceAuthorizationProvider.class.getName()); + } + if(!properties.containsKey(AUTHZ_SERVER_NAME)) { + properties.put(AUTHZ_SERVER_NAME, DEFAULT_AUTHZ_SERVER_NAME); + } + if(!properties.containsKey(HS2_PORT)) { + properties.put(HS2_PORT, String.valueOf(findPort())); + } + if(!properties.containsKey(SUPPORT_CONCURRENCY)) { + properties.put(SUPPORT_CONCURRENCY, "false"); + } + if(!properties.containsKey(HADOOPBIN)) { + properties.put(HADOOPBIN, "./target/test-classes/hadoop"); + } + + // Modify the test resource to have executable permission + java.nio.file.Path hadoopPath = FileSystems.getDefault().getPath("target/test-classes", "hadoop"); + if (hadoopPath != null) { + hadoopPath.toFile().setExecutable(true); + } + + properties.put(METASTORE_RAW_STORE_IMPL, AuthorizingObjectStoreV2.class.getName()); + if (!properties.containsKey(METASTORE_URI) && HiveServer2Type.InternalMetastore.equals(type)) { + // The configuration sentry.metastore.service.users lists the users who + // have full access to the metadata.
+ properties.put(METASTORE_BYPASS, "accessAllMetaUser"); + properties.put(METASTORE_URI, + "thrift://localhost:" + String.valueOf(findPort())); + if (!properties.containsKey(METASTORE_HOOK)) { + properties.put(METASTORE_HOOK, MetastoreAuthzBindingV2.class.getName()); + } + properties.put(ConfVars.METASTORESERVERMINTHREADS.varname, "5"); + } + + // set the SentryMetaStoreFilterHook for HiveServer2 only, not for the metastore + if (!HiveServer2Type.InternalMetastore.equals(type)) { + properties.put(ConfVars.METASTORE_FILTER_HOOK.varname, + org.apache.sentry.binding.metastore.SentryMetaStoreFilterHook.class.getName()); + } + + if (!properties.containsKey(METASTORE_BYPASS)) { + properties.put(METASTORE_BYPASS, "hive,impala," + System.getProperty("user.name", "")); + } else { + String tempByPass = properties.get(METASTORE_BYPASS); + tempByPass = "hive,impala," + System.getProperty("user.name", "") + "," + tempByPass; + properties.put(METASTORE_BYPASS, tempByPass); + } + + properties.put(METASTORE_SETUGI, "true"); + properties.put(METASTORE_CLIENT_TIMEOUT, "100"); + properties.put(ConfVars.HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS.varname, "true"); + + properties.put(ConfVars.HIVESTATSAUTOGATHER.varname, "false"); + properties.put(ConfVars.HIVE_STATS_COLLECT_SCANCOLS.varname, "true"); + String hadoopBinPath = properties.get(HADOOPBIN); + Assert.assertNotNull("Hadoop Bin", hadoopBinPath); + File hadoopBin = new File(hadoopBinPath); + if(!hadoopBin.isFile()) { + Assert.fail("Path to hadoop bin " + hadoopBin.getPath() + " is invalid. " + + "Perhaps you missed the download-hadoop profile."); + } + + /* + * This hack, setting the hiveSiteURL field, removes a previous hack involving + * setting of system properties for each property. Although both are hacks, + * I prefer this hack because once the system properties are set they can + * affect later tests unless those tests clear them. This hack allows for + * a clean switch to a new set of defaults when a new HiveConf object is created.
+ */ + Reflection.staticField("hiveSiteURL") + .ofType(URL.class) + .in(HiveConf.class) + .set(null); + HiveConf hiveConf = new HiveConf(); + HiveAuthzConf authzConf = new HiveAuthzConf(Resources.getResource("sentry-site.xml")); + for(Map.Entry<String, String> entry : properties.entrySet()) { + LOGGER.info(entry.getKey() + " => " + entry.getValue()); + hiveConf.set(entry.getKey(), entry.getValue()); + authzConf.set(entry.getKey(), entry.getValue()); + } + File hiveSite = new File(confDir, "hive-site.xml"); + File accessSite = new File(confDir, HiveAuthzConf.AUTHZ_SITE_FILE); + OutputStream out = new FileOutputStream(accessSite); + authzConf.writeXml(out); + out.close(); + // point hive-site.xml at access-site.xml + hiveConf.set(HiveAuthzConf.HIVE_SENTRY_CONF_URL, "file:///" + accessSite.getPath()); + + if(!properties.containsKey(HiveConf.ConfVars.HIVE_SERVER2_SESSION_HOOK.varname)) { + hiveConf.set(HiveConf.ConfVars.HIVE_SERVER2_SESSION_HOOK.varname, + HiveAuthzBindingSessionHookV2.class.getName()); + } + switch (type) { + case EmbeddedHiveServer2: + case InternalHiveServer2: + case ExternalHiveServer2: + // authorization V2 is used for HiveServer2 + hiveConf.setBoolean(HiveConf.ConfVars.HIVE_AUTHORIZATION_ENABLED.varname, true); + hiveConf.setVar(ConfVars.HIVE_AUTHORIZATION_MANAGER, + SentryAuthorizerFactory.class.getName()); + default: + } + hiveConf.set(HIVESERVER2_IMPERSONATION, "false"); + out = new FileOutputStream(hiveSite); + hiveConf.writeXml(out); + out.close(); + + Reflection.staticField("hiveSiteURL") + .ofType(URL.class) + .in(HiveConf.class) + .set(hiveSite.toURI().toURL()); + + switch (type) { + case EmbeddedHiveServer2: + LOGGER.info("Creating EmbeddedHiveServer"); + return new EmbeddedHiveServer(); + case InternalHiveServer2: + LOGGER.info("Creating InternalHiveServer"); + return new InternalHiveServer(hiveConf); + case InternalMetastore: + LOGGER.info("Creating InternalMetastoreServer"); + return new InternalMetastoreServer(hiveConf); + case ExternalHiveServer2: + LOGGER.info("Creating ExternalHiveServer"); + return new ExternalHiveServer(hiveConf, confDir, logDir); + default: + throw new UnsupportedOperationException(type.name()); + } + } + private static int findPort() throws IOException { + ServerSocket socket = new ServerSocket(0); + int port = socket.getLocalPort(); + socket.close(); + return port; + } + + @VisibleForTesting + public static enum HiveServer2Type { + EmbeddedHiveServer2, // Embedded HS2, directly executed by JDBC, without thrift + InternalHiveServer2, // Start a thrift HS2 in the same process + InternalMetastore, // Start a thrift metastore in the same process + ExternalHiveServer2, // Start a thrift HS2 in a separate, external process + UnmanagedHiveServer2 // Use a remote thrift HS2 already running + ; + } + + public static boolean isInternalServer(HiveServer2Type hs2Type) { + return (HiveServer2Type.InternalHiveServer2.equals(hs2Type) || HiveServer2Type.InternalMetastore + .equals(hs2Type)); + } +} http://git-wip-us.apache.org/repos/asf/sentry/blob/bfb354f2/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/InternalHiveServer.java ---------------------------------------------------------------------- diff --git a/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/InternalHiveServer.java b/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/InternalHiveServer.java new file mode 100644 index 0000000..45f0ef2 --- /dev/null +++ 
b/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/InternalHiveServer.java @@ -0,0 +1,47 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.sentry.tests.e2e.hive.hiveserver; + +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hive.service.server.HiveServer2; + +public class InternalHiveServer extends AbstractHiveServer { + + private final HiveServer2 hiveServer2; + private final HiveConf conf; + + public InternalHiveServer(HiveConf conf) throws Exception { + super(conf, getHostname(conf), getPort(conf)); + hiveServer2 = new HiveServer2(); + this.conf = conf; + } + + @Override + public synchronized void start() throws Exception { + hiveServer2.init(conf); + hiveServer2.start(); + waitForStartup(this); + } + + @Override + public synchronized void shutdown() throws Exception { + if (hiveServer2 != null) { + hiveServer2.stop(); + } + } +} http://git-wip-us.apache.org/repos/asf/sentry/blob/bfb354f2/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/InternalMetastoreServer.java ---------------------------------------------------------------------- diff --git a/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/InternalMetastoreServer.java b/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/InternalMetastoreServer.java new file mode 100644 index 0000000..bf43798 --- /dev/null +++ b/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/InternalMetastoreServer.java @@ -0,0 +1,80 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.sentry.tests.e2e.hive.hiveserver; + +import java.net.URI; +import java.util.concurrent.Callable; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.metastore.HiveMetaStore; +import org.apache.hadoop.hive.shims.ShimLoader; + +public class InternalMetastoreServer extends AbstractHiveServer { + private final HiveConf conf; + private ExecutorService metaStoreExecutor = Executors + .newSingleThreadExecutor(); + + public InternalMetastoreServer(HiveConf conf) throws Exception { + super(conf, getMetastoreHostname(conf), getMetastorePort(conf)); + this.conf = conf; + } + + @Override + public String getURL() { + return "jdbc:hive2://"; + } + + @Override + public void start() throws Exception { + startMetastore(); + } + + @Override + public void shutdown() throws Exception { + metaStoreExecutor.shutdown(); + } + + // async metastore startup since Hive doesn't have that option + private void startMetastore() throws Exception { + Callable<Void> metastoreService = new Callable<Void>() { + public Void call() throws Exception { + try { + HiveMetaStore.startMetaStore(getMetastorePort(conf), + ShimLoader.getHadoopThriftAuthBridge(), conf); + } catch (Throwable e) { + throw new Exception("Error starting metastore", e); + } + return null; + } + }; + metaStoreExecutor.submit(metastoreService); + } + + private static String getMetastoreHostname(Configuration conf) + throws Exception { + return new URI(conf.get(HiveConf.ConfVars.METASTOREURIS.varname)).getHost(); + } + + private static int getMetastorePort(Configuration conf) throws Exception { + return new URI(conf.get(HiveConf.ConfVars.METASTOREURIS.varname)).getPort(); + + } +} http://git-wip-us.apache.org/repos/asf/sentry/blob/bfb354f2/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/UnmanagedHiveServer.java ---------------------------------------------------------------------- diff --git a/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/UnmanagedHiveServer.java b/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/UnmanagedHiveServer.java new file mode 100644 index 0000000..beae8e8 --- /dev/null +++ b/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/hive/hiveserver/UnmanagedHiveServer.java @@ -0,0 +1,113 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.sentry.tests.e2e.hive.hiveserver; + +import java.sql.Connection; +import java.sql.DriverManager; +import java.util.Properties; + +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.security.UserGroupInformation; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.common.base.Preconditions; + +public class UnmanagedHiveServer implements HiveServer { + private static final Logger LOGGER = LoggerFactory.getLogger(UnmanagedHiveServer.class); + private static final String HS2_HOST = HiveConf.ConfVars.HIVE_SERVER2_THRIFT_BIND_HOST.varname; + private static final String HS2_PORT = HiveConf.ConfVars.HIVE_SERVER2_THRIFT_PORT.varname; + private static final String HS2_AUTH = HiveConf.ConfVars.HIVE_SERVER2_AUTHENTICATION.varname; + private static final String HS2_PRINCIPAL = HiveConf.ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL.varname; + private static final String KEYTAB_LOCATION = System.getProperty("sentry.e2e.hive.keytabs.location"); + private static final String AUTHENTICATION_TYPE = System.getProperty(HS2_AUTH, "kerberos"); + + private String hostname; + private String port; + + private String hivePrincipal; + private HiveConf hiveConf; + + public UnmanagedHiveServer() { + hiveConf = new HiveConf(); + hostname = getSystemAndConfigProperties(HS2_HOST, null); + port = getSystemAndConfigProperties(HS2_PORT, "10000"); + if(AUTHENTICATION_TYPE.equalsIgnoreCase("kerberos")){ + hivePrincipal = getSystemAndConfigProperties(HS2_PRINCIPAL, null); + } + } + + private String getSystemAndConfigProperties(String hiveVar, String defaultVal){ + String val = hiveConf.get(hiveVar); + if(val == null || val.trim().equals("")){ + LOGGER.warn(hiveVar + " not found in the client hive-site.xml"); + if(defaultVal == null) { + val = System.getProperty(hiveVar); + }else { + val = System.getProperty(hiveVar, defaultVal); + } + Preconditions.checkNotNull(val, "Required system property is missing; provide it using -D" + hiveVar); + LOGGER.info("Using from system property: " + hiveVar + " = " + val); + }else { + LOGGER.info("Using from hive-site.xml: " + hiveVar + " = " + val); + } + return val; + } + + @Override + public void start() throws Exception { + // For an unmanaged HiveServer, the service need not be started within the test + } + + @Override + public void shutdown() throws Exception { + // For an unmanaged HiveServer, the service need not be stopped within the test + } + + @Override + public String getURL() { + return "jdbc:hive2://" + hostname + ":" + port + "/default;"; + } + + @Override + public String getProperty(String key) { + if(key.equalsIgnoreCase(HiveConf.ConfVars.METASTOREWAREHOUSE.varname)) { + return "hdfs://" + getSystemAndConfigProperties(key, null); // the unmanaged server's config supplies the warehouse dir without the hdfs:// scheme + } + return getSystemAndConfigProperties(key, null); + } + + @Override + public Connection createConnection(String user, String password) throws Exception{ + String url = getURL(); + Properties oProps = new Properties(); + + if(AUTHENTICATION_TYPE.equalsIgnoreCase("kerberos")){ + kinit(user); + url += "principal=" + hivePrincipal; + }else{ + oProps.setProperty("user", user); + oProps.setProperty("password", password); + } + LOGGER.info("url: " + url); + return DriverManager.getConnection(url, oProps); + } + public void kinit(String user) throws Exception{ + UserGroupInformation.loginUserFromKeytab(user, KEYTAB_LOCATION + "/" + user + ".keytab"); + LOGGER.info("Kinited user: " + user + " keytab: " + KEYTAB_LOCATION + "/" + user + ".keytab"); + } +}
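
A note on the unmanaged mode above: UnmanagedHiveServer resolves each setting from the client hive-site.xml first and falls back to a system property of the same name, so pointing the suite at an already running, kerberized HiveServer2 is purely a matter of configuration. A hypothetical invocation (the Maven goal, host, principal, and keytab path below are illustrative assumptions, not values taken from this commit) might look like:

mvn test -Dsentry.e2etest.hiveServer2Type=UnmanagedHiveServer2 \
    -Dhive.server2.thrift.bind.host=hs2.example.com \
    -Dhive.server2.thrift.port=10000 \
    -Dhive.server2.authentication=kerberos \
    -Dhive.server2.authentication.kerberos.principal=hive/_HOST@EXAMPLE.COM \
    -Dsentry.e2e.hive.keytabs.location=/path/to/keytabs

Note that kinit(user) expects a per-user keytab named <user>.keytab under sentry.e2e.hive.keytabs.location.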

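Taken together, the classes in this commit are driven through HiveServerFactory.create(): a test supplies a property map, working directories, a policy file path, and a FileSystem, then starts the returned HiveServer and opens JDBC connections against it. The sketch below shows that life cycle under stated assumptions; it is not part of the commit, the directory layout and credentials are invented for illustration, and it presumes the sentry-tests test classpath (where sentry-site.xml and the hadoop test script resolve) is available.

package org.apache.sentry.tests.e2e.hive.hiveserver;

import java.io.File;
import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

public class HiveServerFactorySketch {
  public static void main(String[] args) throws Exception {
    // Illustrative scratch layout; the real suite derives these from a shared base dir.
    File baseDir = new File("target", "hs2-sketch");
    File confDir = new File(baseDir, "conf");
    File logDir = new File(baseDir, "log");
    confDir.mkdirs();
    logDir.mkdirs();
    // Hypothetical policy file location; a test would write a PolicyFile here first.
    String policyFile = new File(confDir, "sentry-provider.ini").getPath();

    // An empty map: create() fills in the warehouse dir, metastore URL, HS2 port, etc.
    Map<String, String> properties = new HashMap<String, String>();
    FileSystem fileSystem = FileSystem.getLocal(new Configuration());

    // With no sentry.e2etest.hiveServer2Type set, create() defaults to InternalHiveServer2,
    // i.e. a thrift HS2 running inside this JVM.
    HiveServer server = HiveServerFactory.create(properties, baseDir, confDir,
        logDir, policyFile, fileSystem);
    server.start();
    try {
      // Credentials are placeholders; an internal, non-kerberized server accepts any.
      server.createConnection("admin1", "password").close();
    } finally {
      server.shutdown();
    }
  }
}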