SENTRY-514: Enable e2e tests for authorization V2 (Dapeng Sun, reviewed by Anne Yu)
Project: http://git-wip-us.apache.org/repos/asf/sentry/repo Commit: http://git-wip-us.apache.org/repos/asf/sentry/commit/bfb354f2 Tree: http://git-wip-us.apache.org/repos/asf/sentry/tree/bfb354f2 Diff: http://git-wip-us.apache.org/repos/asf/sentry/diff/bfb354f2 Branch: refs/heads/master Commit: bfb354f2b3226c28a7750124acf38e48c6009645 Parents: 30f6807 Author: Sun Dapeng <[email protected]> Authored: Wed Mar 30 09:28:15 2016 +0800 Committer: Sun Dapeng <[email protected]> Committed: Wed Mar 30 09:48:47 2016 +0800 ---------------------------------------------------------------------- sentry-tests/sentry-tests-hive-v2/pom.xml | 505 ++++ .../dbprovider/AbstractTestWithDbProvider.java | 179 ++ .../e2e/dbprovider/TestColumnEndToEnd.java | 417 ++++ .../e2e/dbprovider/TestConcurrentClients.java | 343 +++ .../e2e/dbprovider/TestDatabaseProvider.java | 2180 ++++++++++++++++++ .../TestDbColumnLevelMetaDataOps.java | 374 +++ .../tests/e2e/dbprovider/TestDbComplexView.java | 314 +++ .../tests/e2e/dbprovider/TestDbConnections.java | 150 ++ .../tests/e2e/dbprovider/TestDbCrossDbOps.java | 40 + .../tests/e2e/dbprovider/TestDbDDLAuditLog.java | 293 +++ .../tests/e2e/dbprovider/TestDbEndToEnd.java | 251 ++ .../TestDbExportImportPrivileges.java | 44 + .../e2e/dbprovider/TestDbJDBCInterface.java | 45 + .../TestDbMetadataObjectRetrieval.java | 44 + .../dbprovider/TestDbMetadataPermissions.java | 39 + .../dbprovider/TestDbMovingToProduction.java | 38 + .../tests/e2e/dbprovider/TestDbOperations.java | 37 + .../dbprovider/TestDbPrivilegeAtTransform.java | 38 + .../TestDbPrivilegeCleanupOnDrop.java | 354 +++ .../TestDbPrivilegesAtColumnScope.java | 38 + .../TestDbPrivilegesAtDatabaseScope.java | 46 + .../TestDbPrivilegesAtFunctionScope.java | 39 + .../TestDbPrivilegesAtTableScope.java | 39 + .../TestDbRuntimeMetadataRetrieval.java | 46 + .../tests/e2e/dbprovider/TestDbSandboxOps.java | 49 + .../TestDbSentryOnFailureHookLoading.java | 271 +++ .../e2e/dbprovider/TestDbUriPermissions.java | 
45 + .../TestPrivilegeWithGrantOption.java | 269 +++ .../TestPrivilegeWithHAGrantOption.java | 161 ++ .../sentry/tests/e2e/ha/TestHaEnd2End.java | 171 ++ .../tests/e2e/hdfs/TestHDFSIntegration.java | 1836 +++++++++++++++ .../e2e/hdfs/TestHDFSIntegrationWithHA.java | 28 + .../e2e/hive/AbstractTestWithHiveServer.java | 94 + .../AbstractTestWithStaticConfiguration.java | 730 ++++++ .../apache/sentry/tests/e2e/hive/Context.java | 321 +++ .../e2e/hive/DummySentryOnFailureHook.java | 44 + .../sentry/tests/e2e/hive/PolicyFileEditor.java | 78 + .../tests/e2e/hive/PrivilegeResultSet.java | 124 + .../sentry/tests/e2e/hive/StaticUserGroup.java | 55 + .../sentry/tests/e2e/hive/TestConfigTool.java | 346 +++ .../sentry/tests/e2e/hive/TestCrossDbOps.java | 669 ++++++ .../e2e/hive/TestCustomSerdePrivileges.java | 120 + .../sentry/tests/e2e/hive/TestEndToEnd.java | 128 + .../e2e/hive/TestExportImportPrivileges.java | 162 ++ .../tests/e2e/hive/TestJDBCInterface.java | 228 ++ .../tests/e2e/hive/TestLockPrivileges.java | 214 ++ .../e2e/hive/TestMetadataObjectRetrieval.java | 501 ++++ .../tests/e2e/hive/TestMetadataPermissions.java | 128 + .../tests/e2e/hive/TestMovingToProduction.java | 220 ++ .../sentry/tests/e2e/hive/TestOperations.java | 1116 +++++++++ .../tests/e2e/hive/TestPerDBConfiguration.java | 408 ++++ .../e2e/hive/TestPerDatabasePolicyFile.java | 118 + .../tests/e2e/hive/TestPolicyImportExport.java | 195 ++ .../e2e/hive/TestPrivilegeAtTransform.java | 118 + .../e2e/hive/TestPrivilegesAtColumnScope.java | 518 +++++ .../e2e/hive/TestPrivilegesAtDatabaseScope.java | 399 ++++ .../e2e/hive/TestPrivilegesAtFunctionScope.java | 262 +++ .../e2e/hive/TestPrivilegesAtTableScope.java | 662 ++++++ .../tests/e2e/hive/TestReloadPrivileges.java | 54 + .../e2e/hive/TestRuntimeMetadataRetrieval.java | 429 ++++ .../sentry/tests/e2e/hive/TestSandboxOps.java | 529 +++++ .../hive/TestSentryOnFailureHookLoading.java | 134 ++ .../tests/e2e/hive/TestServerConfiguration.java | 265 +++ 
.../tests/e2e/hive/TestUriPermissions.java | 262 +++ .../tests/e2e/hive/TestUserManagement.java | 396 ++++ .../tests/e2e/hive/TestViewPrivileges.java | 138 ++ .../sentry/tests/e2e/hive/fs/AbstractDFS.java | 87 + .../sentry/tests/e2e/hive/fs/ClusterDFS.java | 69 + .../apache/sentry/tests/e2e/hive/fs/DFS.java | 32 + .../sentry/tests/e2e/hive/fs/DFSFactory.java | 49 + .../sentry/tests/e2e/hive/fs/MiniDFS.java | 94 + .../e2e/hive/hiveserver/AbstractHiveServer.java | 95 + .../e2e/hive/hiveserver/EmbeddedHiveServer.java | 60 + .../e2e/hive/hiveserver/ExternalHiveServer.java | 124 + .../tests/e2e/hive/hiveserver/HiveServer.java | 34 + .../e2e/hive/hiveserver/HiveServerFactory.java | 296 +++ .../e2e/hive/hiveserver/InternalHiveServer.java | 47 + .../hiveserver/InternalMetastoreServer.java | 80 + .../hive/hiveserver/UnmanagedHiveServer.java | 113 + ...actMetastoreTestWithStaticConfiguration.java | 218 ++ .../metastore/SentryPolicyProviderForDb.java | 163 ++ .../metastore/TestAuthorizingObjectStore.java | 1106 +++++++++ .../e2e/metastore/TestMetaStoreWithPigHCat.java | 113 + .../e2e/metastore/TestMetastoreEndToEnd.java | 628 +++++ .../tests/e2e/minisentry/InternalSentrySrv.java | 270 +++ .../sentry/tests/e2e/minisentry/SentrySrv.java | 100 + .../tests/e2e/minisentry/SentrySrvFactory.java | 45 + .../resources/core-site-for-sentry-test.xml | 34 + .../src/test/resources/emp.dat | 12 + .../src/test/resources/hadoop | 107 + .../src/test/resources/kv1.dat | 500 ++++ .../src/test/resources/log4j.properties | 35 + .../src/test/resources/sentry-provider.ini | 25 + .../src/test/resources/sentry-site.xml | 33 + .../src/test/resources/testPolicyImport.ini | 25 + .../test/resources/testPolicyImportAdmin.ini | 22 + .../test/resources/testPolicyImportError.ini | 21 + .../scale-test/create-many-dbs-tables.sh | 277 +++ 98 files changed, 23802 insertions(+) ---------------------------------------------------------------------- 
http://git-wip-us.apache.org/repos/asf/sentry/blob/bfb354f2/sentry-tests/sentry-tests-hive-v2/pom.xml ---------------------------------------------------------------------- diff --git a/sentry-tests/sentry-tests-hive-v2/pom.xml b/sentry-tests/sentry-tests-hive-v2/pom.xml index 90d20a7..b6590bd 100644 --- a/sentry-tests/sentry-tests-hive-v2/pom.xml +++ b/sentry-tests/sentry-tests-hive-v2/pom.xml @@ -26,4 +26,509 @@ limitations under the License. <artifactId>sentry-tests-hive-v2</artifactId> <name>Sentry Hive Tests v2</name> <description>end to end tests for sentry-hive-v2 integration</description> + <properties> + <!-- + <hadoop-dist>.</hadoop-dist> + <hive-dist>${hadoop-dist}</hive-dist> + --> + <HADOOP_CONF_DIR>${env.HADOOP_CONF_DIR}</HADOOP_CONF_DIR> + <HIVE_CONF_DIR>${env.HIVE_CONF_DIR}</HIVE_CONF_DIR> + </properties> + <dependencies> + <dependency> + <groupId>org.apache.thrift</groupId> + <artifactId>libthrift</artifactId> + <exclusions> + <exclusion> + <groupId>org.apache.httpcomponents</groupId> + <artifactId>httpclient</artifactId> + </exclusion> + <exclusion> + <groupId>org.apache.httpcomponents</groupId> + <artifactId>httpcore</artifactId> + </exclusion> + </exclusions> + </dependency> + <dependency> + <groupId>org.apache.derby</groupId> + <artifactId>derby</artifactId> + </dependency> + <dependency> + <groupId>org.apache.hive</groupId> + <artifactId>hive-service</artifactId> + <version>${hive-v2.version}</version> + <exclusions> + <exclusion> + <groupId>ant</groupId> + <artifactId>ant</artifactId> + </exclusion> + </exclusions> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.hive</groupId> + <artifactId>hive-shims</artifactId> + <version>${hive-v2.version}</version> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.hive</groupId> + <artifactId>hive-serde</artifactId> + <version>${hive-v2.version}</version> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.hive</groupId> + 
<artifactId>hive-metastore</artifactId> + <exclusions> + <exclusion> + <groupId>org.datanucleus</groupId> + <artifactId>datanucleus-core</artifactId> + </exclusion> + <exclusion> + <groupId>org.datanucleus</groupId> + <artifactId>datanucleus-rdbms</artifactId> + </exclusion> + </exclusions> + <version>${hive-v2.version}</version> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.hive</groupId> + <artifactId>hive-jdbc</artifactId> + <version>${hive-v2.version}</version> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.hive</groupId> + <artifactId>hive-hbase-handler</artifactId> + <version>${hive-v2.version}</version> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.hive</groupId> + <artifactId>hive-exec</artifactId> + <exclusions> + <exclusion> + <groupId>org.datanucleus</groupId> + <artifactId>datanucleus-core</artifactId> + </exclusion> + <exclusion> + <groupId>org.datanucleus</groupId> + <artifactId>datanucleus-rdbms</artifactId> + </exclusion> + </exclusions> + <version>${hive-v2.version}</version> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.hive</groupId> + <artifactId>hive-contrib</artifactId> + <version>${hive-v2.version}</version> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.hive</groupId> + <artifactId>hive-common</artifactId> + <version>${hive-v2.version}</version> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.hive</groupId> + <artifactId>hive-cli</artifactId> + <version>${hive-v2.version}</version> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.hive.hcatalog</groupId> + <artifactId>hive-hcatalog-core</artifactId> + <version>${hive-v2.version}</version> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.hive.hcatalog</groupId> + <artifactId>hive-hcatalog-pig-adapter</artifactId> + <version>${hive-v2.version}</version> + <scope>test</scope> + 
</dependency> + <dependency> + <groupId>org.apache.hive</groupId> + <artifactId>hive-beeline</artifactId> + <version>${hive-v2.version}</version> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-common</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-annotations</artifactId> + <version>${hadoop.version}</version> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-auth</artifactId> + <version>${hadoop.version}</version> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-hdfs</artifactId> + <version>${hadoop.version}</version> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-mapreduce-client-common</artifactId> + <version>${hadoop.version}</version> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-mapreduce-client-core</artifactId> + <version>${hadoop.version}</version> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-mapreduce-client-jobclient</artifactId> + <version>${hadoop.version}</version> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-mapreduce-client-shuffle</artifactId> + <version>${hadoop.version}</version> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-yarn-api</artifactId> + <version>${hadoop.version}</version> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-yarn-common</artifactId> + <version>${hadoop.version}</version> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.hadoop</groupId> + 
<artifactId>hadoop-yarn-server-common</artifactId> + <version>${hadoop.version}</version> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-yarn-server-nodemanager</artifactId> + <version>${hadoop.version}</version> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.pig</groupId> + <artifactId>pig</artifactId> + <version>${pig.version}</version> + <classifier>h2</classifier> + <scope>test</scope> + </dependency> + <dependency> + <groupId>junit</groupId> + <artifactId>junit</artifactId> + </dependency> + <dependency> + <groupId>org.easytesting</groupId> + <artifactId>fest-reflect</artifactId> + </dependency> + <dependency> + <groupId>org.apache.sentry</groupId> + <artifactId>sentry-binding-hive-v2</artifactId> + </dependency> + <dependency> + <groupId>org.apache.shiro</groupId> + <artifactId>shiro-core</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.sentry</groupId> + <artifactId>sentry-provider-db</artifactId> + <exclusions> + <exclusion> + <groupId>org.apache.hive</groupId> + <artifactId>hive-beeline</artifactId> + </exclusion> + </exclusions> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.sentry</groupId> + <artifactId>sentry-provider-file</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.sentry</groupId> + <artifactId>sentry-hdfs-common</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.sentry</groupId> + <artifactId>sentry-hdfs-service</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.sentry</groupId> + <artifactId>sentry-hdfs-namenode-plugin</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.sentry</groupId> + <artifactId>sentry-policy-db</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.hadoop</groupId> + 
<artifactId>hadoop-minicluster</artifactId> + <scope>test</scope> +<!-- + <exclusions> + <exclusion> + <groupId>org.apache.hadoop</groupId> + <artifactId>hadoop-mapreduce-client-jobclient</artifactId> + </exclusion> + </exclusions> +--> + </dependency> + <dependency> + <groupId>org.hamcrest</groupId> + <artifactId>hamcrest-all</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.curator</groupId> + <artifactId>curator-framework</artifactId> + <scope>test</scope> + </dependency> + <dependency> + <groupId>org.apache.zookeeper</groupId> + <artifactId>zookeeper</artifactId> + <exclusions> + <exclusion> + <groupId>org.jboss.netty</groupId> + <artifactId>netty</artifactId> + </exclusion> + </exclusions> + <scope>test</scope> + </dependency> + <dependency> + <groupId>joda-time</groupId> + <artifactId>joda-time</artifactId> + <version>${joda-time.version}</version> + <scope>test</scope> + </dependency> + </dependencies> + <build> + <plugins> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-surefire-plugin</artifactId> + <configuration> + <reuseForks>false</reuseForks> + <systemPropertyVariables> + <buildDirectory>${project.build.directory}</buildDirectory> + </systemPropertyVariables> + <excludes> + <exclude>**/TestHDFSIntegration.java</exclude> + <exclude>**/TestHDFSIntegrationWithHA.java</exclude> + </excludes> + </configuration> + </plugin> + </plugins> + <pluginManagement> + <plugins> + <!--This plugin's configuration is used to store Eclipse m2e settings only. 
It has no influence on the Maven build itself.--> + <plugin> + <groupId>org.eclipse.m2e</groupId> + <artifactId>lifecycle-mapping</artifactId> + <version>1.0.0</version> + <configuration> + <lifecycleMappingMetadata> + <pluginExecutions> + <pluginExecution> + <pluginExecutionFilter> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-antrun-plugin</artifactId> + <versionRange>[1.7,)</versionRange> + <goals> + <goal>run</goal> + </goals> + </pluginExecutionFilter> + <action> + <ignore></ignore> + </action> + </pluginExecution> + </pluginExecutions> + </lifecycleMappingMetadata> + </configuration> + </plugin> + </plugins> + </pluginManagement> + </build> + <profiles> + <profile> + <id>link-hadoop</id> + <activation> + <activeByDefault>false</activeByDefault> + <property><name>!skipTests</name></property> + </activation> + <build> + <plugins> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-surefire-plugin</artifactId> + <configuration> + <environmentVariables> + <SENTRY_HADOOP_TEST_CLASSPATH>${maven.test.classpath}</SENTRY_HADOOP_TEST_CLASSPATH> + </environmentVariables> + </configuration> + </plugin> + </plugins> + </build> + </profile> + <profile> + <id>cluster-hadoop</id> + <build> + <plugins> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-enforcer-plugin</artifactId> + <version>1.0</version> + <executions> + <execution> + <id>enforce-property</id> + <goals> + <goal>enforce</goal> + </goals> + <configuration> + <rules> + <requireProperty> + <property>HADOOP_CONF_DIR</property> + <message>HADOOP_CONF_DIR env. variable has to be set</message> + </requireProperty> + <requireProperty> + <property>HIVE_CONF_DIR</property> + <message>HIVE_CONF_DIR env. 
variable has to be set</message> + </requireProperty> + </rules> + <fail>true</fail> + </configuration> + </execution> + </executions> + </plugin> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-surefire-plugin</artifactId> + <configuration> + <systemPropertyVariables> + <buildDirectory>${project.build.directory}</buildDirectory> + </systemPropertyVariables> + <additionalClasspathElements> + <additionalClasspathElement>${HIVE_CONF_DIR}</additionalClasspathElement> + <additionalClasspathElement>${HADOOP_CONF_DIR}</additionalClasspathElement> + </additionalClasspathElements> + <includes> + <include>**/TestCrossDbOps.java</include> + <include>**/TestEndToEnd.java</include> + <include>**/TestMetadataObjectRetrieval.java</include> + <include>**/TestMetadataPermissions.java</include> + <include>**/TestMovingToProduction.java</include> + <include>**/TestPerDatabasePolicyFile.java</include> + <include>**/TestPrivilegeAtTransform.java</include> + <include>**/TestPrivilegesAtDatabaseScope.java</include> + <include>**/TestPrivilegesAtTableScope.java</include> + <include>**/TestSandboxOps.java</include> + <include>**/TestExportImportPrivileges.java</include> + <include>**/TestUriPermissions.java</include> + <include>**/TestRuntimeMetadataRetrieval.java</include> + <include>**/TestOperations.java</include> + <include>**/TestPrivilegesAtColumnScope.java</include> + </includes> + <argLine>-Dsentry.e2etest.hiveServer2Type=UnmanagedHiveServer2 -Dsentry.e2etest.DFSType=ClusterDFS</argLine> + </configuration> + </plugin> + </plugins> + </build> + </profile> + <profile> + <id>cluster-hadoop-provider-db</id> + <build> + <plugins> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-enforcer-plugin</artifactId> + <version>1.0</version> + <executions> + <execution> + <id>enforce-property</id> + <goals> + <goal>enforce</goal> + </goals> + <configuration> + <rules> + <requireProperty> + <property>HADOOP_CONF_DIR</property> + 
<message>HADOOP_CONF_DIR env. variable has to be set</message> + </requireProperty> + <requireProperty> + <property>HIVE_CONF_DIR</property> + <message>HIVE_CONF_DIR env. variable has to be set</message> + </requireProperty> + </rules> + <fail>true</fail> + </configuration> + </execution> + </executions> + </plugin> + <plugin> + <groupId>org.apache.maven.plugins</groupId> + <artifactId>maven-surefire-plugin</artifactId> + <configuration> + <systemPropertyVariables> + <buildDirectory>${project.build.directory}</buildDirectory> + </systemPropertyVariables> + <additionalClasspathElements> + <additionalClasspathElement>${HIVE_CONF_DIR}</additionalClasspathElement> + <additionalClasspathElement>${HADOOP_CONF_DIR}</additionalClasspathElement> + </additionalClasspathElements> + <includes> + <include>**/TestDbCrossDbOps.java</include> + <include>**/TestDbEndToEnd.java</include> + <include>**/TestDbMetadataObjectRetrieval.java</include> + <include>**/TestDbMetadataPermissions.java</include> + <include>**/TestDbMovingToProduction.java</include> + <include>**/TestDbPerDatabasePolicyFile.java</include> + <include>**/TestDbPrivilegeAtTransform.java</include> + <include>**/TestDbPrivilegesAtDatabaseScope.java</include> + <include>**/TestDbPrivilegesAtTableScope.java</include> + <include>**/TestDbSandboxOps.java</include> + <include>**/TestDbExportImportPrivileges.java</include> + <include>**/TestDbUriPermissions.java</include> + <include>**/TestDbRuntimeMetadataRetrieval.java</include> + <include>**/TestDatabaseProvider.java</include> + <include>**/TestDbOperations.java</include> + <include>**/TestPrivilegeWithGrantOption.java</include> + <include>**/TestDbPrivilegesAtColumnScope.java</include> + <include>**/TestColumnEndToEnd.java</include> + <include>**/TestDbComplexView.java</include> + <include>**/TestConcurrentClients.java</include> + </includes> + <argLine>-Dsentry.e2etest.hiveServer2Type=UnmanagedHiveServer2 -Dsentry.e2etest.DFSType=ClusterDFS 
-Dsentry.e2etest.external.sentry=true</argLine> + </configuration> + </plugin> + </plugins> + </build> + </profile> + </profiles> </project> http://git-wip-us.apache.org/repos/asf/sentry/blob/bfb354f2/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/dbprovider/AbstractTestWithDbProvider.java ---------------------------------------------------------------------- diff --git a/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/dbprovider/AbstractTestWithDbProvider.java b/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/dbprovider/AbstractTestWithDbProvider.java new file mode 100644 index 0000000..d843829 --- /dev/null +++ b/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/dbprovider/AbstractTestWithDbProvider.java @@ -0,0 +1,179 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.sentry.tests.e2e.dbprovider; + +import java.io.File; +import java.sql.Connection; +import java.sql.Statement; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeoutException; + +import org.apache.commons.io.FileUtils; +import org.apache.curator.test.TestingServer; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.hive.conf.HiveConf; +import org.apache.hadoop.hive.conf.HiveConf.ConfVars; +import org.apache.sentry.binding.hive.v2.SentryHiveAuthorizationTaskFactoryImplV2; +import org.apache.sentry.provider.db.SimpleDBProviderBackend; +import org.apache.sentry.provider.file.PolicyFile; +import org.apache.sentry.service.thrift.SentryService; +import org.apache.sentry.service.thrift.SentryServiceFactory; +import org.apache.sentry.service.thrift.ServiceConstants.ClientConfig; +import org.apache.sentry.service.thrift.ServiceConstants.ServerConfig; +import org.apache.sentry.tests.e2e.hive.AbstractTestWithHiveServer; +import org.apache.sentry.tests.e2e.hive.Context; +import org.apache.sentry.tests.e2e.hive.StaticUserGroup; +import org.apache.sentry.tests.e2e.hive.hiveserver.HiveServerFactory; +import org.junit.AfterClass; +import org.junit.BeforeClass; + +import com.google.common.base.Preconditions; +import com.google.common.collect.Maps; +import com.google.common.io.Files; + +public abstract class AbstractTestWithDbProvider extends AbstractTestWithHiveServer { + + protected static final String SERVER_HOST = "localhost"; + + protected static Map<String, String> properties = Maps.newHashMap(); + private static File dbDir; + private static int sentryServerCount = 1; + private static List<SentryService> servers = new ArrayList<SentryService>(sentryServerCount); + private static Configuration conf; + private static PolicyFile policyFile; + private static File policyFilePath; + protected static Context context; + + protected static boolean haEnabled; + private static 
TestingServer zkServer; + + @BeforeClass + public static void setupTest() throws Exception { + } + + public static void createContext() throws Exception { + conf = new Configuration(false); + policyFile = PolicyFile.setAdminOnServer1(ADMINGROUP); + properties.put(HiveServerFactory.AUTHZ_PROVIDER_BACKEND, SimpleDBProviderBackend.class.getName()); + properties.put(ConfVars.HIVE_AUTHORIZATION_TASK_FACTORY.varname, + SentryHiveAuthorizationTaskFactoryImplV2.class.getName()); + properties.put(ServerConfig.SECURITY_MODE, ServerConfig.SECURITY_MODE_NONE); + properties.put(ServerConfig.ADMIN_GROUPS, ADMINGROUP); + properties.put(ServerConfig.RPC_ADDRESS, SERVER_HOST); + properties.put(ServerConfig.RPC_PORT, String.valueOf(0)); + dbDir = new File(Files.createTempDir(), "sentry_policy_db"); + properties.put(ServerConfig.SENTRY_STORE_JDBC_URL, + "jdbc:derby:;databaseName=" + dbDir.getPath() + ";create=true"); + properties.put(ServerConfig.SENTRY_STORE_JDBC_PASS, "dummy"); + properties.put(ServerConfig.SENTRY_VERIFY_SCHEM_VERSION, "false"); + properties.put(ServerConfig.SENTRY_STORE_GROUP_MAPPING, + ServerConfig.SENTRY_STORE_LOCAL_GROUP_MAPPING); + policyFilePath = new File(Files.createTempDir(), "sentry-policy-file.ini"); + properties.put(ServerConfig.SENTRY_STORE_GROUP_MAPPING_RESOURCE, + policyFilePath.getPath()); + if (haEnabled) { + zkServer = new TestingServer(); + zkServer.start(); + properties.put(ServerConfig.SENTRY_HA_ENABLED, "true"); + properties.put(ServerConfig.SENTRY_HA_ZOOKEEPER_NAMESPACE, "sentry-test"); + properties.put(ServerConfig.SENTRY_HA_ZOOKEEPER_QUORUM, zkServer.getConnectString()); + } + for (Map.Entry<String, String> entry : properties.entrySet()) { + conf.set(entry.getKey(), entry.getValue()); + } + for (int i = 0; i < sentryServerCount; i++) { + SentryService server = new SentryServiceFactory().create(new Configuration(conf)); + servers.add(server); + properties.put(ClientConfig.SERVER_RPC_ADDRESS, server.getAddress() + .getHostName()); + 
properties.put(ClientConfig.SERVER_RPC_PORT, + String.valueOf(server.getAddress().getPort())); + } + properties.put(HiveConf.ConfVars.METASTORE_AUTO_CREATE_ALL.varname, "true"); + context = AbstractTestWithHiveServer.createContext(properties); + policyFile + .setUserGroupMapping(StaticUserGroup.getStaticMapping()) + .write(context.getPolicyFile(), policyFilePath); + + startSentryService(); + } + + @AfterClass + public static void tearDown() throws Exception { + for (SentryService server : servers) { + if (server != null) { + server.stop(); + } + } + if (context != null) { + context.close(); + } + if (dbDir != null) { + FileUtils.deleteQuietly(dbDir); + } + if (zkServer != null) { + zkServer.stop(); + } + } + + protected void setupAdmin(Context context) throws Exception { + Connection connection = context.createConnection(ADMIN1); + Statement statement = connection.createStatement(); + statement.execute("CREATE ROLE admin_role"); + statement.execute("GRANT ALL ON SERVER " + + HiveServerFactory.DEFAULT_AUTHZ_SERVER_NAME + " TO ROLE admin_role"); + statement.execute("GRANT ROLE admin_role TO GROUP " + ADMINGROUP); + statement.close(); + connection.close(); + } + + private static void startSentryService() throws Exception { + for (SentryService server : servers) { + server.start(); + final long start = System.currentTimeMillis(); + while(!server.isRunning()) { + Thread.sleep(1000); + if(System.currentTimeMillis() - start > 60000L) { + throw new TimeoutException("Server did not start after 60 seconds"); + } + } + } + } + + protected void shutdownAllSentryService() throws Exception { + for (SentryService server : servers) { + if (server != null) { + server.stop(); + } + } + servers = null; + } + + protected void startSentryService(int serverCount) throws Exception { + Preconditions.checkArgument((serverCount > 0), "Server count should > 0."); + servers = new ArrayList<SentryService>(serverCount); + for (int i = 0; i < sentryServerCount; i++) { + SentryService server = 
new SentryServiceFactory().create(new Configuration(conf)); + servers.add(server); + } + startSentryService(); + } + +} http://git-wip-us.apache.org/repos/asf/sentry/blob/bfb354f2/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestColumnEndToEnd.java ---------------------------------------------------------------------- diff --git a/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestColumnEndToEnd.java b/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestColumnEndToEnd.java new file mode 100644 index 0000000..32d0a61 --- /dev/null +++ b/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestColumnEndToEnd.java @@ -0,0 +1,417 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.sentry.tests.e2e.dbprovider; + +import static org.junit.Assert.*; + +import java.io.File; +import java.io.FileOutputStream; +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.SQLException; +import java.sql.Statement; +import java.util.ArrayList; +import java.util.List; + +import org.apache.sentry.provider.db.SentryAccessDeniedException; +import org.apache.sentry.tests.e2e.hive.AbstractTestWithStaticConfiguration; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; + +import com.google.common.io.Resources; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class TestColumnEndToEnd extends AbstractTestWithStaticConfiguration { + private static final Logger LOGGER = LoggerFactory. + getLogger(TestColumnEndToEnd.class); + + private final String SINGLE_TYPE_DATA_FILE_NAME = "kv1.dat"; + private File dataFile; + + @BeforeClass + public static void setupTestStaticConfiguration() throws Exception{ + LOGGER.info("TestColumnEndToEnd setupTestStaticConfiguration"); + useSentryService = true; + AbstractTestWithStaticConfiguration.setupTestStaticConfiguration(); + } + + @Override + @Before + public void setup() throws Exception { + super.setupAdmin(); + super.setup(); + super.setupPolicy(); + dataFile = new File(dataDir, SINGLE_TYPE_DATA_FILE_NAME); + FileOutputStream to = new FileOutputStream(dataFile); + Resources.copy(Resources.getResource(SINGLE_TYPE_DATA_FILE_NAME), to); + to.close(); + } + + @Test + public void testBasic() throws Exception { + Connection connection = context.createConnection(ADMIN1); + Statement statement = context.createStatement(connection); + statement.execute("CREATE database " + DB1); + statement.execute("USE " + DB1); + statement.execute("CREATE TABLE t1 (c1 string)"); + statement.execute("CREATE ROLE user_role"); + statement.execute("GRANT SELECT ON TABLE t1 TO ROLE user_role"); + statement.execute("GRANT ROLE user_role TO GROUP " + USERGROUP1); + 
statement.close(); + connection.close(); + connection = context.createConnection(USER1_1); + statement = context.createStatement(connection); + context.assertSentryException(statement, "CREATE ROLE r2", + SentryAccessDeniedException.class.getSimpleName()); + + statement.execute("SELECT * FROM " + DB1 + ".t1"); + statement.close(); + connection.close(); + } + + @Test + public void testDescribeTbl() throws Exception { + Connection connection = context.createConnection(ADMIN1); + Statement statement = context.createStatement(connection); + statement.execute("CREATE TABLE IF NOT EXISTS t1 (c1 string, c2 string)"); + statement.execute("CREATE TABLE t2 (c1 string, c2 string)"); + statement.execute("CREATE ROLE user_role1"); + statement.execute("GRANT SELECT (c1) ON TABLE t1 TO ROLE user_role1"); + statement.execute("GRANT ROLE user_role1 TO GROUP " + USERGROUP1); + statement.close(); + connection.close(); + + connection = context.createConnection(USER1_1); + statement = context.createStatement(connection); + + // Expect that DESCRIBE table works with only column-level privileges, but other + // DESCRIBE variants like DESCRIBE FORMATTED fail. Note that if a user has privileges + // on any column they can describe all columns. 
+ ResultSet rs = statement.executeQuery("DESCRIBE t1"); + assertTrue(rs.next()); + assertEquals("c1", rs.getString(1)); + assertEquals("string", rs.getString(2)); + assertTrue(rs.next()); + assertEquals("c2", rs.getString(1)); + assertEquals("string", rs.getString(2)); + + statement.executeQuery("DESCRIBE t1 c1"); + statement.executeQuery("DESCRIBE t1 c2"); + + try { + statement.executeQuery("DESCRIBE t2"); + fail("Expected DESCRIBE to fail on t2"); + } catch (SQLException e) { + context.verifyAuthzException(e); + } + + try { + statement.executeQuery("DESCRIBE FORMATTED t1"); + fail("Expected DESCRIBE FORMATTED to fail"); + } catch (SQLException e) { + context.verifyAuthzException(e); + } + + try { + statement.executeQuery("DESCRIBE EXTENDED t1"); + fail("Expected DESCRIBE EXTENDED to fail"); + } catch (SQLException e) { + context.verifyAuthzException(e); + } + statement.close(); + connection.close(); + + // Cleanup + connection = context.createConnection(ADMIN1); + statement = context.createStatement(connection); + statement.execute("DROP TABLE t1"); + statement.execute("DROP TABLE t2"); + statement.execute("DROP ROLE user_role1"); + statement.close(); + connection.close(); + } + + @Test + public void testNegative() throws Exception { + Connection connection = context.createConnection(ADMIN1); + Statement statement = context.createStatement(connection); + statement.execute("CREATE TABLE t1 (c1 string, c2 string)"); + statement.execute("CREATE ROLE user_role1"); + statement.execute("CREATE ROLE user_role2"); + statement.execute("GRANT SELECT (c1) ON TABLE t1 TO ROLE user_role1"); + statement.execute("GRANT SELECT (c1,c2) ON TABLE t1 TO ROLE user_role2"); + + //Make sure insert/all are not supported + try { + statement.execute("GRANT INSERT (c2) ON TABLE t1 TO ROLE user_role2"); + assertTrue("Sentry should not support privilege: Insert on Column", false); + } catch (Exception e) { + assertTrue("The error should be 'Sentry does not support privilege: Insert on 
Column'", + e.getMessage().toUpperCase().contains("SENTRY DOES NOT SUPPORT PRIVILEGE: INSERT ON COLUMN")); + } + try { + statement.execute("GRANT ALL (c2) ON TABLE t1 TO ROLE user_role2"); + assertTrue("Sentry should not support privilege: ALL on Column", false); + } catch (Exception e) { + assertTrue("The error should be 'Sentry does not support privilege: All on Column'", + e.getMessage().toUpperCase().contains("SENTRY DOES NOT SUPPORT PRIVILEGE: ALL ON COLUMN")); + } + statement.execute("GRANT ROLE user_role1 TO GROUP " + USERGROUP1); + statement.execute("GRANT ROLE user_role2 TO GROUP " + USERGROUP2); + statement.close(); + connection.close(); + + /* + Behavior of select col, select count(col), select *, and select count(*), count(1) + */ + // 1.1 user_role1 select c1,c2 from t1, will throw exception + connection = context.createConnection(USER1_1); + statement = context.createStatement(connection); + try { + statement.execute("SELECT c1,c2 FROM t1"); + assertTrue("User with privilege on one column is able to access other column!!", false); + } catch (SQLException e) { + context.verifyAuthzException(e); + } + + // 1.2 user_role1 count(col) works, *, count(*) and count(1) fails + statement.execute("SELECT count(c1) FROM t1"); + try { + statement.execute("SELECT * FROM t1"); + assertTrue("Select * should fail - only SELECT allowed on t1.c1!!", false); + } catch (SQLException e) { + context.verifyAuthzException(e); + } + try { + statement.execute("SELECT count(*) FROM t1"); + assertTrue("Select count(*) should fail - only SELECT allowed on t1.c1!!", false); + } catch (SQLException e) { + context.verifyAuthzException(e); + } + try { + statement.execute("SELECT count(1) FROM t1"); + assertTrue("Select count(1) should fail - only SELECT allowed on t1.c1!!", false); + } catch (SQLException e) { + context.verifyAuthzException(e); + } + + statement.close(); + connection.close(); + + + // 2.1 user_role2 can do *, count(col), but count(*) and count(1) fails + connection = 
context.createConnection(USER2_1); + statement = context.createStatement(connection); + statement.execute("SELECT count(c1) FROM t1"); + statement.execute("SELECT * FROM t1"); + + //SENTRY-838 + try { + statement.execute("SELECT count(*) FROM t1"); + assertTrue("Select count(*) works only with table level privileges - User has select on all columns!!", false); + } catch (Exception e) { + // Ignore + } + try { + statement.execute("SELECT count(1) FROM t1"); + assertTrue("Select count(1) works only with table level privileges - User has select on all columns!!", false); + } catch (Exception e) { + // Ignore + } + statement.close(); + connection.close(); + } + + @Test + public void testPositive() throws Exception { + Connection connection = context.createConnection(ADMIN1); + Statement statement = context.createStatement(connection); + statement.execute("CREATE database " + DB1); + statement.execute("use " + DB1); + statement.execute("CREATE TABLE t1 (c1 string, c2 string)"); + statement.execute("CREATE ROLE user_role1"); + statement.execute("CREATE ROLE user_role2"); + statement.execute("CREATE ROLE user_role3"); + statement.execute("GRANT SELECT (c1) ON TABLE t1 TO ROLE user_role1"); + statement.execute("GRANT SELECT (c1, c2) ON TABLE t1 TO ROLE user_role2"); + statement.execute("GRANT SELECT ON TABLE t1 TO ROLE user_role3"); + statement.execute("GRANT ROLE user_role1 TO GROUP " + USERGROUP1); + statement.execute("GRANT ROLE user_role2 TO GROUP " + USERGROUP2); + statement.execute("GRANT ROLE user_role3 TO GROUP " + USERGROUP3); + statement.close(); + connection.close(); + + // 1 user_role1 select c1 on t1 + connection = context.createConnection(USER1_1); + statement = context.createStatement(connection); + statement.execute("use " + DB1); + statement.execute("SELECT c1 FROM t1"); + statement.execute("DESCRIBE t1"); + + // 2.1 user_role2 select c1,c2 on t1 + connection = context.createConnection(USER2_1); + statement = context.createStatement(connection); + 
statement.execute("use " + DB1); + statement.execute("SELECT c1,c2 FROM t1"); + // 2.2 user_role2 select * on t1 + statement.execute("SELECT * FROM t1"); + + // 3.1 user_role3 select * on t1 + connection = context.createConnection(USER3_1); + statement = context.createStatement(connection); + statement.execute("use " + DB1); + statement.execute("SELECT * FROM t1"); + // 3.2 user_role3 select c1,c2 on t1 + statement.execute("SELECT c1,c2 FROM t1"); + + statement.close(); + connection.close(); + } + + @Test + public void testCreateTableAsSelect() throws Exception { + Connection connection = context.createConnection(ADMIN1); + Statement statement = context.createStatement(connection); + statement.execute("CREATE database " + DB1); + statement.execute("use " + DB1); + statement.execute("CREATE TABLE t1 (c1 string, c2 string)"); + statement.execute("CREATE ROLE user_role1"); + statement.execute("CREATE ROLE user_role2"); + statement.execute("CREATE ROLE user_role3"); + statement.execute("GRANT SELECT (c1) ON TABLE t1 TO ROLE user_role1"); + statement.execute("GRANT SELECT (c1, c2) ON TABLE t1 TO ROLE user_role2"); + statement.execute("GRANT SELECT ON TABLE t1 TO ROLE user_role3"); + statement.execute("GRANT CREATE ON DATABASE " + DB1 + " TO ROLE user_role1"); + statement.execute("GRANT CREATE ON DATABASE " + DB1 + " TO ROLE user_role2"); + statement.execute("GRANT CREATE ON DATABASE " + DB1 + " TO ROLE user_role3"); + statement.execute("GRANT ROLE user_role1 TO GROUP " + USERGROUP1); + statement.execute("GRANT ROLE user_role2 TO GROUP " + USERGROUP2); + statement.execute("GRANT ROLE user_role3 TO GROUP " + USERGROUP3); + statement.close(); + connection.close(); + + // 1 user_role1 create table as select + connection = context.createConnection(USER1_1); + statement = context.createStatement(connection); + statement.execute("use " + DB1); + statement.execute("CREATE TABLE t1_1 AS SELECT c1 FROM t1"); + try { + statement.execute("CREATE TABLE t1_2 AS SELECT * FROM t1"); + 
assertTrue("no permission on table t1!!", false); + } catch (Exception e) { + // Ignore + } + + // 2 user_role2 create table as select + connection = context.createConnection(USER2_1); + statement = context.createStatement(connection); + statement.execute("use " + DB1); + statement.execute("CREATE TABLE t2_1 AS SELECT c1 FROM t1"); + statement.execute("CREATE TABLE t2_2 AS SELECT * FROM t1"); + + // 3 user_role3 create table as select + connection = context.createConnection(USER3_1); + statement = context.createStatement(connection); + statement.execute("use " + DB1); + statement.execute("CREATE TABLE t3_1 AS SELECT c1 FROM t1"); + statement.execute("CREATE TABLE t3_2 AS SELECT * FROM t1"); + + statement.close(); + connection.close(); + } + + @Test + public void testShowColumns() throws Exception { + // grant select on test_tb(s) to USER1_1 + Connection connection = context.createConnection(ADMIN1); + Statement statement = context.createStatement(connection); + statement.execute("CREATE database " + DB1); + statement.execute("use " + DB1); + statement.execute("CREATE TABLE test_tb (s string, i string)"); + statement.execute("CREATE ROLE user_role1"); + statement.execute("GRANT SELECT (s) ON TABLE test_tb TO ROLE user_role1"); + statement.execute("GRANT ROLE user_role1 TO GROUP " + USERGROUP1); + statement.close(); + connection.close(); + + // USER1_1 executes "show columns in test_tb" and gets the s column information + connection = context.createConnection(USER1_1); + statement = context.createStatement(connection); + statement.execute("use " + DB1); + ResultSet res = statement.executeQuery("show columns in test_tb"); + + List<String> expectedResult = new ArrayList<String>(); + List<String> returnedResult = new ArrayList<String>(); + expectedResult.add("s"); + while (res.next()) { + returnedResult.add(res.getString(1).trim()); + } + validateReturnedResult(expectedResult, returnedResult); + returnedResult.clear(); + expectedResult.clear(); + res.close(); + + 
statement.close(); + connection.close(); + + // grant select on test_tb(s, i) to USER2_1 + connection = context.createConnection(ADMIN1); + statement = context.createStatement(connection); + statement.execute("use " + DB1); + statement.execute("CREATE ROLE user_role2"); + statement.execute("GRANT SELECT(s, i) ON TABLE test_tb TO ROLE user_role2"); + statement.execute("GRANT ROLE user_role2 TO GROUP " + USERGROUP2); + statement.close(); + connection.close(); + + // USER2_1 executes "show columns in test_tb" and gets the s,i columns information + connection = context.createConnection(USER2_1); + statement = context.createStatement(connection); + statement.execute("use " + DB1); + res = statement.executeQuery("show columns in test_tb"); + + expectedResult.add("s"); + expectedResult.add("i"); + while (res.next()) { + returnedResult.add(res.getString(1).trim()); + } + validateReturnedResult(expectedResult, returnedResult); + returnedResult.clear(); + expectedResult.clear(); + res.close(); + + statement.close(); + connection.close(); + + // USER3_1 executes "show columns in test_tb" and the exception will be thrown + connection = context.createConnection(USER3_1); + statement = context.createStatement(connection); + try { + // USER3_1 has no privilege on any column, so "show columns in test_tb" will throw an exception + statement.execute("show columns in db_1.test_tb"); + fail("No valid privileges exception should have been thrown"); + } catch (Exception e) { + } + + statement.close(); + connection.close(); + } +} http://git-wip-us.apache.org/repos/asf/sentry/blob/bfb354f2/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestConcurrentClients.java ---------------------------------------------------------------------- diff --git a/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestConcurrentClients.java 
b/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestConcurrentClients.java new file mode 100644 index 0000000..bf871f0 --- /dev/null +++ b/sentry-tests/sentry-tests-hive-v2/src/test/java/org/apache/sentry/tests/e2e/dbprovider/TestConcurrentClients.java @@ -0,0 +1,343 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +package org.apache.sentry.tests.e2e.dbprovider; + +import org.apache.sentry.provider.db.service.thrift.SentryPolicyServiceClient; +import org.apache.sentry.provider.file.PolicyFile; +import org.apache.sentry.tests.e2e.hive.AbstractTestWithStaticConfiguration; + +import org.apache.sentry.tests.e2e.hive.StaticUserGroup; +import static org.junit.Assume.assumeTrue; +import org.junit.Before; +import org.junit.BeforeClass; +import org.junit.Test; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.sql.Connection; +import java.sql.ResultSet; +import java.sql.Statement; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.Executors; +import java.util.HashMap; +import java.util.Map; + +import org.apache.commons.lang.RandomStringUtils; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertEquals; + +/** + * The test class implements concurrency tests to test: + * Sentry client, HS2 jdbc client etc. 
+ */ +public class TestConcurrentClients extends AbstractTestWithStaticConfiguration { + private static final Logger LOGGER = LoggerFactory + .getLogger(TestConcurrentClients.class); + + private PolicyFile policyFile; + + // define scale for tests + private final int NUM_OF_TABLES = Integer.parseInt(System.getProperty( + "sentry.e2e.concurrency.test.tables-per-db", "1")); + private final int NUM_OF_PAR = Integer.parseInt(System.getProperty( + "sentry.e2e.concurrency.test.partitions-per-tb", "3")); + private final int NUM_OF_THREADS = Integer.parseInt(System.getProperty( + "sentry.e2e.concurrency.test.threads", "30")); + private final int NUM_OF_TASKS = Integer.parseInt(System.getProperty( + "sentry.e2e.concurrency.test.tasks", "100")); + private final Long HS2_CLIENT_TEST_DURATION_MS = Long.parseLong(System.getProperty( + "sentry.e2e.concurrency.test.hs2client.test.time.ms", "10000")); //millis + private final Long SENTRY_CLIENT_TEST_DURATION_MS = Long.parseLong(System.getProperty( + "sentry.e2e.concurrency.test.sentryclient.test.time.ms", "10000")); //millis + + private static Map<String, String> privileges = new HashMap<String, String>(); + static { + privileges.put("all_db1", "server=server1->db=" + DB1 + "->action=all"); + } + + @Override + @Before + public void setup() throws Exception { + super.setupAdmin(); + policyFile = PolicyFile.setAdminOnServer1(ADMINGROUP) + .setUserGroupMapping(StaticUserGroup.getStaticMapping()); + writePolicyFile(policyFile); + } + + @BeforeClass + public static void setupTestStaticConfiguration() throws Exception { + assumeTrue(Boolean.parseBoolean(System.getProperty("sentry.scaletest.oncluster", "false"))); + useSentryService = true; // configure sentry client + clientKerberos = true; // need to get client configuration from testing environments + AbstractTestWithStaticConfiguration.setupTestStaticConfiguration(); + } + + static String randomString( int len ){ + return RandomStringUtils.random(len, true, false); + } + + private 
void execStmt(Statement stmt, String sql) throws Exception { + LOGGER.info("Running [" + sql + "]"); + stmt.execute(sql); + } + + private void createDbTb(String user, String db, String tb) throws Exception{ + Connection connection = context.createConnection(user); + Statement statement = context.createStatement(connection); + try { + execStmt(statement, "DROP DATABASE IF EXISTS " + db + " CASCADE"); + execStmt(statement, "CREATE DATABASE " + db); + execStmt(statement, "USE " + db); + for (int i = 0; i < NUM_OF_TABLES; i++) { + String tbName = tb + "_" + Integer.toString(i); + execStmt(statement, "CREATE TABLE " + tbName + " (a string) PARTITIONED BY (b string)"); + } + } catch (Exception ex) { + LOGGER.error("caught exception: " + ex); + } finally { + statement.close(); + connection.close(); + } + } + + private void createPartition(String user, String db, String tb) throws Exception{ + Connection connection = context.createConnection(user); + Statement statement = context.createStatement(connection); + try { + execStmt(statement, "USE " + db); + for (int j = 0; j < NUM_OF_TABLES; j++) { + String tbName = tb + "_" + Integer.toString(j); + for (int i = 0; i < NUM_OF_PAR; i++) { + String randStr = randomString(4); + String sql = "ALTER TABLE " + tbName + " ADD IF NOT EXISTS PARTITION (b = '" + randStr + "') "; + LOGGER.info("[" + i + "] " + sql); + execStmt(statement, sql); + } + } + } catch (Exception ex) { + LOGGER.error("caught exception: " + ex); + } finally { + statement.close(); + connection.close(); + } + } + + private void adminCreateRole(String roleName) throws Exception { + Connection connection = context.createConnection(ADMIN1); + Statement stmt = context.createStatement(connection); + try { + execStmt(stmt, "DROP ROLE " + roleName); + } catch (Exception ex) { + LOGGER.warn("Role does not exist " + roleName); + } finally { + try { + execStmt(stmt, "CREATE ROLE " + roleName); + } catch (Exception ex) { + LOGGER.error("caught exception when create new role: 
" + ex); + } finally { + stmt.close(); + connection.close(); + } + } + } + + private void adminCleanUp(String db, String roleName) throws Exception { + Connection connection = context.createConnection(ADMIN1); + Statement stmt = context.createStatement(connection); + try { + execStmt(stmt, "DROP DATABASE IF EXISTS " + db + " CASCADE"); + execStmt(stmt, "DROP ROLE " + roleName); + } catch (Exception ex) { + LOGGER.warn("Failed to clean up ", ex); + } finally { + stmt.close(); + connection.close(); + } + } + + private void adminShowRole(String roleName) throws Exception { + Connection connection = context.createConnection(ADMIN1); + Statement stmt = context.createStatement(connection); + boolean found = false; + try { + ResultSet rs = stmt.executeQuery("SHOW ROLES "); + while (rs.next()) { + if (rs.getString("role").equalsIgnoreCase(roleName)) { + LOGGER.info("Found role " + roleName); + found = true; + } + } + } catch (Exception ex) { + LOGGER.error("caught exception when show roles: " + ex); + } finally { + stmt.close(); + connection.close(); + } + assertTrue("failed to detect " + roleName, found); + } + + private void adminGrant(String test_db, String test_tb, + String roleName, String group) throws Exception { + Connection connection = context.createConnection(ADMIN1); + Statement stmt = context.createStatement(connection); + try { + execStmt(stmt, "USE " + test_db); + for (int i = 0; i < NUM_OF_TABLES; i++) { + String tbName = test_tb + "_" + Integer.toString(i); + execStmt(stmt, "GRANT ALL ON TABLE " + tbName + " TO ROLE " + roleName); + } + execStmt(stmt, "GRANT ROLE " + roleName + " TO GROUP " + group); + } catch (Exception ex) { + LOGGER.error("caught exception when grant permission and role: " + ex); + } finally { + stmt.close(); + connection.close(); + } + } + + /** + * A synchronized state class to track concurrency test status from each thread + */ + private final static class TestRuntimeState { + private int numSuccess = 0; + private boolean failed = 
false; + private Throwable firstException = null; + + public synchronized void setFirstException(Throwable e) { + failed = true; + if (firstException == null) { + firstException = e; + } + } + public synchronized void setNumSuccess() { + numSuccess += 1; + } + public synchronized int getNumSuccess() { + return numSuccess; + } + public synchronized Throwable getFirstException() { + return firstException; + } + } + + /** + * Test when concurrent HS2 clients talking to server, + * Privileges are correctly created and updated. + * @throws Exception + */ + @Test + public void testConccurentHS2Client() throws Exception { + ExecutorService executor = Executors.newFixedThreadPool(NUM_OF_THREADS); + final TestRuntimeState state = new TestRuntimeState(); + + for (int i = 0; i < NUM_OF_TASKS; i ++) { + executor.execute(new Runnable() { + @Override + public void run() { + LOGGER.info("Starting tests: create role, show role, create db and tbl, and create partitions"); + if (state.failed) { + return; + } + try { + Long startTime = System.currentTimeMillis(); + Long elapsedTime = 0L; + while (Long.compare(elapsedTime, HS2_CLIENT_TEST_DURATION_MS) <= 0) { + String randStr = randomString(5); + String test_role = "test_role_" + randStr; + String test_db = "test_db_" + randStr; + String test_tb = "test_tb_" + randStr; + LOGGER.info("Start to test sentry with hs2 client with role " + test_role); + adminCreateRole(test_role); + adminShowRole(test_role); + createDbTb(ADMIN1, test_db, test_tb); + adminGrant(test_db, test_tb, test_role, USERGROUP1); + createPartition(USER1_1, test_db, test_tb); + adminCleanUp(test_db, test_role); + elapsedTime = System.currentTimeMillis() - startTime; + LOGGER.info("elapsedTime = " + elapsedTime); + } + state.setNumSuccess(); + } catch (Exception e) { + LOGGER.error("Exception: " + e); + state.setFirstException(e); + } + } + }); + } + executor.shutdown(); + while (!executor.isTerminated()) { + Thread.sleep(1000); //millisecond + } + Throwable ex = 
state.getFirstException(); + assertFalse( ex == null ? "Test failed" : ex.toString(), state.failed); + assertEquals(NUM_OF_TASKS, state.getNumSuccess()); + } + + /** + * Test when concurrent sentry clients talking to sentry server, threads data are synchronized + * @throws Exception + */ + @Test + public void testConcurrentSentryClient() throws Exception { + final String HIVE_KEYTAB_PATH = + System.getProperty("sentry.e2etest.hive.policyOwnerKeytab"); + final SentryPolicyServiceClient client = getSentryClient("hive", HIVE_KEYTAB_PATH); + ExecutorService executor = Executors.newFixedThreadPool(NUM_OF_THREADS); + + final TestRuntimeState state = new TestRuntimeState(); + for (int i = 0; i < NUM_OF_TASKS; i ++) { + LOGGER.info("Start to test sentry client with task id [" + i + "]"); + executor.execute(new Runnable() { + @Override + public void run() { + if (state.failed) { + LOGGER.error("found one failed state, abort test from here."); + return; + } + try { + String randStr = randomString(5); + String test_role = "test_role_" + randStr; + LOGGER.info("Start to test role: " + test_role); + Long startTime = System.currentTimeMillis(); + Long elapsedTime = 0L; + while (Long.compare(elapsedTime, SENTRY_CLIENT_TEST_DURATION_MS) <= 0) { + LOGGER.info("Test role " + test_role + " runs " + elapsedTime + " ms."); + client.createRole(ADMIN1, test_role); + client.listRoles(ADMIN1); + client.grantServerPrivilege(ADMIN1, test_role, "server1", false); + client.listAllPrivilegesByRoleName(ADMIN1, test_role); + client.dropRole(ADMIN1, test_role); + elapsedTime = System.currentTimeMillis() - startTime; + } + state.setNumSuccess(); + } catch (Exception e) { + LOGGER.error("Sentry Client Testing Exception: ", e); + state.setFirstException(e); + } + } + }); + } + executor.shutdown(); + while (!executor.isTerminated()) { + Thread.sleep(1000); //millisecond + } + Throwable ex = state.getFirstException(); + assertFalse( ex == null ? 
"Test failed" : ex.toString(), state.failed); + assertEquals(NUM_OF_TASKS, state.getNumSuccess()); + } +}
