Repository: incubator-sentry
Updated Branches:
refs/heads/sentry-hdfs-plugin d66ebb71a -> e9160ba81
SENTRY-432: Made the following changes:
- Fix the Sentry HDFS integration test to start the HMS after HDFS exits safe mode
- Ensure privileges granted/revoked on a Db propagate to its table directories
- Clean up more log messages
- Serialize updates sent between the Metastore plugin and Sentry to reduce the number of sync messages
- Add a flag to switch the SentryHDFSServiceClient to TBinaryProtocol
- Remove the unknown-protocol warning from the Sentry service
- Add more test cases
- Add the /lib/plugins folder to the Sentry classpath
Project: http://git-wip-us.apache.org/repos/asf/incubator-sentry/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-sentry/commit/e9160ba8
Tree: http://git-wip-us.apache.org/repos/asf/incubator-sentry/tree/e9160ba8
Diff: http://git-wip-us.apache.org/repos/asf/incubator-sentry/diff/e9160ba8
Branch: refs/heads/sentry-hdfs-plugin
Commit: e9160ba81bcb97acc4b42d4ab99c98dd2f42d25d
Parents: d66ebb7
Author: Arun Suresh <[email protected]>
Authored: Wed Oct 29 22:29:21 2014 -0700
Committer: Arun Suresh <[email protected]>
Committed: Wed Oct 29 22:29:21 2014 -0700
----------------------------------------------------------------------
bin/sentry | 3 +
pom.xml | 1 +
.../SentryMetastorePostEventListener.java | 5 +-
sentry-dist/pom.xml | 14 +-
sentry-dist/src/main/assembly/bin.xml | 14 +-
sentry-dist/src/main/assembly/sentry-hdfs.xml | 65 --------
.../sentry/hdfs/SentryHDFSServiceClient.java | 14 +-
.../apache/sentry/hdfs/ServiceConstants.java | 2 +
.../sentry/hdfs/UpdateableAuthzPaths.java | 2 +-
.../sentry/hdfs/TestHMSPathsFullDump.java | 20 ++-
sentry-hdfs/sentry-hdfs-dist/pom.xml | 45 +++---
.../src/main/assembly/all-jar.xml | 18 ---
.../hdfs/SentryAuthorizationConstants.java | 2 +-
.../hdfs/SentryAuthorizationProvider.java | 2 +
.../apache/sentry/hdfs/SentryPermissions.java | 69 +++++++-
.../sentry/hdfs/UpdateableAuthzPermissions.java | 36 +++--
.../hdfs/TestSentryAuthorizationProvider.java | 1 +
.../org/apache/sentry/hdfs/MetastorePlugin.java | 97 ++++++++++--
.../sentry/hdfs/SentryHDFSServiceProcessor.java | 2 +-
.../hdfs/SentryHDFSServiceProcessorFactory.java | 8 +-
.../org/apache/sentry/hdfs/SentryPlugin.java | 38 +++--
.../db/SentryMetastoreListenerPlugin.java | 2 +-
.../provider/db/SentryPolicyStorePlugin.java | 3 +
.../thrift/SentryPolicyServiceClient.java | 4 +-
.../thrift/SentryPolicyStoreProcessor.java | 11 +-
.../sentry/service/thrift/SentryService.java | 2 +-
.../tests/e2e/hdfs/TestHDFSIntegration.java | 157 +++++++++++++++----
...actMetastoreTestWithStaticConfiguration.java | 1 -
28 files changed, 425 insertions(+), 213 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/e9160ba8/bin/sentry
----------------------------------------------------------------------
diff --git a/bin/sentry b/bin/sentry
index 0b98049..93809ea 100755
--- a/bin/sentry
+++ b/bin/sentry
@@ -72,6 +72,9 @@ then
for f in ${SENTRY_HOME}/lib/server/*.jar; do
HADOOP_CLASSPATH=${HADOOP_CLASSPATH}:${f}
done
+ for f in ${SENTRY_HOME}/lib/plugins/*.jar; do
+ HADOOP_CLASSPATH=${HADOOP_CLASSPATH}:${f}
+ done
exec $HADOOP jar ${SENTRY_HOME}/lib/${_CMD_JAR} org.apache.sentry.SentryMain
${args[@]}
else
exec ${SENTRY_HOME}/bin/config_tool ${args[@]}
http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/e9160ba8/pom.xml
----------------------------------------------------------------------
diff --git a/pom.xml b/pom.xml
index 02d70da..27d8fa7 100644
--- a/pom.xml
+++ b/pom.xml
@@ -452,6 +452,7 @@ limitations under the License.
<downloadSources>true</downloadSources>
<workspaceActiveCodeStyleProfileName>GoogleStyle</workspaceActiveCodeStyleProfileName>
<workspaceCodeStylesURL>https://google-styleguide.googlecode.com/svn/trunk/eclipse-java-google-style.xml</workspaceCodeStylesURL>
+
<sourceIncludes><include>src/gen/thrift/gen-javabean/**</include></sourceIncludes>
</configuration>
</plugin>
http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/e9160ba8/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetastorePostEventListener.java
----------------------------------------------------------------------
diff --git
a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetastorePostEventListener.java
b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetastorePostEventListener.java
index efbd43f..1fee287 100644
---
a/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetastorePostEventListener.java
+++
b/sentry-binding/sentry-binding-hive/src/main/java/org/apache/sentry/binding/metastore/SentryMetastorePostEventListener.java
@@ -114,7 +114,7 @@ public class SentryMetastorePostEventListener extends
MetaStoreEventListener {
String authzObj = tableEvent.getTable().getDbName() + "."
+ tableEvent.getTable().getTableName();
for (SentryMetastoreListenerPlugin plugin : sentryPlugins) {
- plugin.removePath(authzObj, "*");
+ plugin.removeAllPaths(authzObj, null);
}
}
// drop the privileges on the given table
@@ -160,7 +160,8 @@ public class SentryMetastorePostEventListener extends
MetaStoreEventListener {
public void onDropDatabase(DropDatabaseEvent dbEvent) throws MetaException {
String authzObj = dbEvent.getDatabase().getName();
for (SentryMetastoreListenerPlugin plugin : sentryPlugins) {
- plugin.removePath(authzObj, "*");
+ List<String> tNames = dbEvent.getHandler().get_all_tables(authzObj);
+ plugin.removeAllPaths(authzObj, tNames);
}
dropSentryDbPrivileges(dbEvent.getDatabase().getName());
if (!syncWithPolicyStore(AuthzConfVars.AUTHZ_SYNC_DROP_WITH_POLICY_STORE))
{
http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/e9160ba8/sentry-dist/pom.xml
----------------------------------------------------------------------
diff --git a/sentry-dist/pom.xml b/sentry-dist/pom.xml
index 19a9707..4eb1d9c 100644
--- a/sentry-dist/pom.xml
+++ b/sentry-dist/pom.xml
@@ -74,21 +74,10 @@ limitations under the License.
<groupId>org.apache.sentry</groupId>
<artifactId>sentry-policy-search</artifactId>
</dependency>
- <dependency>
- <groupId>org.apache.sentry</groupId>
- <artifactId>sentry-hdfs-common</artifactId>
- </dependency>
- <dependency>
- <groupId>org.apache.sentry</groupId>
- <artifactId>sentry-hdfs-service</artifactId>
- </dependency>
- <dependency>
- <groupId>org.apache.sentry</groupId>
- <artifactId>sentry-hdfs-namenode-plugin</artifactId>
- </dependency>
</dependencies>
<build>
<plugins>
+
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-assembly-plugin</artifactId>
@@ -105,7 +94,6 @@ limitations under the License.
<descriptors>
<descriptor>src/main/assembly/src.xml</descriptor>
<descriptor>src/main/assembly/bin.xml</descriptor>
- <descriptor>src/main/assembly/sentry-hdfs.xml</descriptor>
</descriptors>
</configuration>
</execution>
http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/e9160ba8/sentry-dist/src/main/assembly/bin.xml
----------------------------------------------------------------------
diff --git a/sentry-dist/src/main/assembly/bin.xml
b/sentry-dist/src/main/assembly/bin.xml
index 6b95a3c..f1d301b 100644
--- a/sentry-dist/src/main/assembly/bin.xml
+++ b/sentry-dist/src/main/assembly/bin.xml
@@ -46,6 +46,18 @@
<exclude>org.apache.derby:derby</exclude>
</excludes>
</dependencySet>
+<!--
+ <dependencySet>
+ <outputDirectory>lib/plugins</outputDirectory>
+ <unpack>true</unpack>
+ <useTransitiveDependencies>false</useTransitiveDependencies>
+ <includes>
+ <include>org.apache.sentry:sentry-provider-db</include>
+ <include>org.apache.sentry:sentry-hdfs-common</include>
+ <include>org.apache.sentry:sentry-hdfs-namenode-plugin</include>
+ </includes>
+ </dependencySet>
+-->
<dependencySet>
<outputDirectory>lib/server</outputDirectory>
<unpack>false</unpack>
@@ -102,7 +114,7 @@
<excludes>
<exclude>sentry-hdfs-dist-*.jar</exclude>
</excludes>
- <outputDirectory>lib</outputDirectory>
+ <outputDirectory>lib/plugins</outputDirectory>
</fileSet>
<fileSet>
<directory>${project.parent.basedir}/sentry-provider/sentry-provider-db/src/main/resources</directory>
http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/e9160ba8/sentry-dist/src/main/assembly/sentry-hdfs.xml
----------------------------------------------------------------------
diff --git a/sentry-dist/src/main/assembly/sentry-hdfs.xml
b/sentry-dist/src/main/assembly/sentry-hdfs.xml
deleted file mode 100644
index 8aef857..0000000
--- a/sentry-dist/src/main/assembly/sentry-hdfs.xml
+++ /dev/null
@@ -1,65 +0,0 @@
-<!--
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements. See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership. The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing,
- software distributed under the License is distributed on an
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- KIND, either express or implied. See the License for the
- specific language governing permissions and limitations
- under the License.
--->
-
-<assembly
- xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2"
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-
xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.2
http://maven.apache.org/xsd/assembly-1.1.2.xsd">
-
- <id>hdfs</id>
-
- <formats>
- <format>tar.gz</format>
- </formats>
-
- <baseDirectory>sentry-hdfs-${project.version}</baseDirectory>
-
- <dependencySets>
- <dependencySet>
- <outputDirectory>/</outputDirectory>
- <unpack>true</unpack>
- <useProjectArtifact>false</useProjectArtifact>
- <useStrictFiltering>true</useStrictFiltering>
- <useTransitiveFiltering>false</useTransitiveFiltering>
- <includes>
- <include>org.apache.thrift:libthrift</include>
- <include>org.apache.thrift:libfb303</include>
- <include>org.apache.sentry:sentry-hdfs-namenode-plugin</include>
- <include>org.apache.sentry:sentry-hdfs-common</include>
- <include>org.apache.sentry:sentry-hdfs-service</include>
- </includes>
- </dependencySet>
- </dependencySets>
-
-<!--
- <fileSets>
- <fileSet>
-
<directory>${project.parent.basedir}/sentry-hdfs/sentry-hdfs-dist/target</directory>
- <includes>
- <include>sentry-hdfs-*.jar</include>
- </includes>
- <excludes>
- <exclude>sentry-hdfs-dist-*.jar</exclude>
- </excludes>
- <outputDirectory>/</outputDirectory>
- </fileSet>
- </fileSets>
--->
-</assembly>
-
http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/e9160ba8/sentry-hdfs/sentry-hdfs-common/src/main/java/org/apache/sentry/hdfs/SentryHDFSServiceClient.java
----------------------------------------------------------------------
diff --git
a/sentry-hdfs/sentry-hdfs-common/src/main/java/org/apache/sentry/hdfs/SentryHDFSServiceClient.java
b/sentry-hdfs/sentry-hdfs-common/src/main/java/org/apache/sentry/hdfs/SentryHDFSServiceClient.java
index ecd6ee7..5425daa 100644
---
a/sentry-hdfs/sentry-hdfs-common/src/main/java/org/apache/sentry/hdfs/SentryHDFSServiceClient.java
+++
b/sentry-hdfs/sentry-hdfs-common/src/main/java/org/apache/sentry/hdfs/SentryHDFSServiceClient.java
@@ -39,8 +39,10 @@ import org.apache.sentry.hdfs.service.thrift.TPathsUpdate;
import org.apache.sentry.hdfs.service.thrift.TPermissionsUpdate;
import org.apache.sentry.hdfs.ServiceConstants.ClientConfig;
import org.apache.sentry.hdfs.ServiceConstants.ServerConfig;
-import org.apache.thrift.protocol.TCompactProtocol;
+import org.apache.thrift.protocol.TBinaryProtocol;
+//import org.apache.thrift.protocol.TCompactProtocol;
import org.apache.thrift.protocol.TMultiplexedProtocol;
+import org.apache.thrift.protocol.TProtocol;
import org.apache.thrift.transport.TSaslClientTransport;
import org.apache.thrift.transport.TSocket;
import org.apache.thrift.transport.TTransport;
@@ -167,9 +169,15 @@ public class SentryHDFSServiceClient {
throw new IOException("Transport exception while opening transport: " +
e.getMessage(), e);
}
LOGGER.info("Successfully opened transport: " + transport + " to " +
serverAddress);
+ TProtocol tProtocol = new TBinaryProtocol(transport);
+// if (conf.getBoolean(ClientConfig.USE_COMPACT_TRANSPORT,
+// ClientConfig.USE_COMPACT_TRANSPORT_DEFAULT)) {
+// tProtocol = new TCompactProtocol(transport);
+// } else {
+// tProtocol = new TBinaryProtocol(transport);
+// }
TMultiplexedProtocol protocol = new TMultiplexedProtocol(
- new TCompactProtocol(transport),
- SentryHDFSServiceClient.SENTRY_HDFS_SERVICE_NAME);
+ tProtocol, SentryHDFSServiceClient.SENTRY_HDFS_SERVICE_NAME);
client = new SentryHDFSService.Client(protocol);
LOGGER.info("Successfully created client");
}
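
The commented-out block above hints at how the new ServiceConstants flag is meant to choose the client protocol once it is enabled. A minimal sketch, assuming a Hadoop Configuration named conf and an already opened Thrift TTransport (the class name ProtocolChoice is hypothetical, not part of this commit):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.sentry.hdfs.ServiceConstants.ClientConfig;
    import org.apache.thrift.protocol.TBinaryProtocol;
    import org.apache.thrift.protocol.TCompactProtocol;
    import org.apache.thrift.protocol.TProtocol;
    import org.apache.thrift.transport.TTransport;

    class ProtocolChoice {
      // Pick the wire protocol from the new client flag; the commit itself
      // hard-codes TBinaryProtocol and leaves this selection commented out.
      static TProtocol forClient(Configuration conf, TTransport transport) {
        boolean useCompact = conf.getBoolean(ClientConfig.USE_COMPACT_TRANSPORT,
            ClientConfig.USE_COMPACT_TRANSPORT_DEFAULT);
        return useCompact ? new TCompactProtocol(transport)
                          : new TBinaryProtocol(transport);
      }
    }

Whatever protocol the client ends up using has to match the server side, which this commit switches to a TBinaryProtocol.Factory() in SentryService.java further below.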
http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/e9160ba8/sentry-hdfs/sentry-hdfs-common/src/main/java/org/apache/sentry/hdfs/ServiceConstants.java
----------------------------------------------------------------------
diff --git
a/sentry-hdfs/sentry-hdfs-common/src/main/java/org/apache/sentry/hdfs/ServiceConstants.java
b/sentry-hdfs/sentry-hdfs-common/src/main/java/org/apache/sentry/hdfs/ServiceConstants.java
index 27ab336..64cb943 100644
---
a/sentry-hdfs/sentry-hdfs-common/src/main/java/org/apache/sentry/hdfs/ServiceConstants.java
+++
b/sentry-hdfs/sentry-hdfs-common/src/main/java/org/apache/sentry/hdfs/ServiceConstants.java
@@ -63,6 +63,8 @@ public class ServiceConstants {
public static final String SERVER_RPC_CONN_TIMEOUT =
"sentry.hdfs.service.client.server.rpc-connection-timeout";
public static final int SERVER_RPC_CONN_TIMEOUT_DEFAULT = 200000;
+ public static final String USE_COMPACT_TRANSPORT = "sentry.hdfs.service.client.compact.transport";
+ public static final boolean USE_COMPACT_TRANSPORT_DEFAULT = false;
}
}
http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/e9160ba8/sentry-hdfs/sentry-hdfs-common/src/main/java/org/apache/sentry/hdfs/UpdateableAuthzPaths.java
----------------------------------------------------------------------
diff --git
a/sentry-hdfs/sentry-hdfs-common/src/main/java/org/apache/sentry/hdfs/UpdateableAuthzPaths.java
b/sentry-hdfs/sentry-hdfs-common/src/main/java/org/apache/sentry/hdfs/UpdateableAuthzPaths.java
index adf658c..03b288b 100644
---
a/sentry-hdfs/sentry-hdfs-common/src/main/java/org/apache/sentry/hdfs/UpdateableAuthzPaths.java
+++
b/sentry-hdfs/sentry-hdfs-common/src/main/java/org/apache/sentry/hdfs/UpdateableAuthzPaths.java
@@ -77,7 +77,7 @@ public class UpdateableAuthzPaths implements AuthzPaths,
Updateable<PathsUpdate>
lock.writeLock().lock();
}
seqNum.set(update.getSeqNum());
- LOG.warn("##### Updated paths seq Num [" + seqNum.get() + "]");
+ LOG.debug("##### Updated paths seq Num [" + seqNum.get() + "]");
}
} finally {
lock.writeLock().unlock();
http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/e9160ba8/sentry-hdfs/sentry-hdfs-common/src/test/java/org/apache/sentry/hdfs/TestHMSPathsFullDump.java
----------------------------------------------------------------------
diff --git
a/sentry-hdfs/sentry-hdfs-common/src/test/java/org/apache/sentry/hdfs/TestHMSPathsFullDump.java
b/sentry-hdfs/sentry-hdfs-common/src/test/java/org/apache/sentry/hdfs/TestHMSPathsFullDump.java
index 2d9db53..2dfe73c 100644
---
a/sentry-hdfs/sentry-hdfs-common/src/test/java/org/apache/sentry/hdfs/TestHMSPathsFullDump.java
+++
b/sentry-hdfs/sentry-hdfs-common/src/test/java/org/apache/sentry/hdfs/TestHMSPathsFullDump.java
@@ -23,12 +23,16 @@ import org.apache.sentry.hdfs.service.thrift.TPathsDump;
import org.apache.thrift.TDeserializer;
import org.apache.thrift.TException;
import org.apache.thrift.TSerializer;
+import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.protocol.TCompactProtocol;
+import org.apache.thrift.protocol.TProtocolFactory;
import org.junit.Test;
import com.google.common.collect.Lists;
public class TestHMSPathsFullDump {
+
+ private static boolean useCompact = true;
@Test
public void testDumpAndInitialize() {
@@ -71,13 +75,13 @@ public class TestHMSPathsFullDump {
public void testThrftSerialization() throws TException {
HMSPaths hmsPaths = new HMSPaths(new String[] {"/"});
String prefix = "/user/hive/warehouse/";
- for (int dbNum = 0; dbNum < 1; dbNum++) {
+ for (int dbNum = 0; dbNum < 10; dbNum++) {
String dbName = "db" + dbNum;
hmsPaths._addAuthzObject(dbName, Lists.newArrayList(prefix + dbName));
- for (int tblNum = 0; tblNum < 1000000; tblNum++) {
+ for (int tblNum = 0; tblNum < 1000; tblNum++) {
String tblName = "tbl" + tblNum;
hmsPaths._addAuthzObject(dbName + "." + tblName,
Lists.newArrayList(prefix + dbName + "/" + tblName));
- for (int partNum = 0; partNum < 1; partNum++) {
+ for (int partNum = 0; partNum < 100; partNum++) {
String partName = "part" + partNum;
hmsPaths
._addPathsToAuthzObject(
@@ -90,17 +94,19 @@ public class TestHMSPathsFullDump {
HMSPathsDumper serDe = hmsPaths.getPathsDump();
long t1 = System.currentTimeMillis();
TPathsDump pathsDump = serDe.createPathsDump();
- byte[] ser = new TSerializer(new TCompactProtocol.Factory()).serialize(pathsDump);
+
+ TProtocolFactory protoFactory = useCompact ? new TCompactProtocol.Factory() : new TBinaryProtocol.Factory();
+ byte[] ser = new TSerializer(protoFactory).serialize(pathsDump);
long serTime = System.currentTimeMillis() - t1;
System.out.println("Serialization Time: " + serTime + ", " + ser.length);
t1 = System.currentTimeMillis();
TPathsDump tPathsDump = new TPathsDump();
- new TDeserializer(new TCompactProtocol.Factory()).deserialize(tPathsDump, ser);
+ new TDeserializer(protoFactory).deserialize(tPathsDump, ser);
HMSPaths fromDump = serDe.initializeFromDump(tPathsDump);
System.out.println("Deserialization Time: " + (System.currentTimeMillis() - t1));
- Assert.assertEquals("db9.tbl999", fromDump.findAuthzObject(new String[]{"user", "hive", "warehouse", "db0", "tbl999"}, false));
- Assert.assertEquals("db9.tbl999", fromDump.findAuthzObject(new String[]{"user", "hive", "warehouse", "db0", "tbl999", "part5"}, false));
+ Assert.assertEquals("db9.tbl999", fromDump.findAuthzObject(new String[]{"user", "hive", "warehouse", "db9", "tbl999"}, false));
+ Assert.assertEquals("db9.tbl999", fromDump.findAuthzObject(new String[]{"user", "hive", "warehouse", "db9", "tbl999", "part99"}, false));
}
}
http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/e9160ba8/sentry-hdfs/sentry-hdfs-dist/pom.xml
----------------------------------------------------------------------
diff --git a/sentry-hdfs/sentry-hdfs-dist/pom.xml
b/sentry-hdfs/sentry-hdfs-dist/pom.xml
index 91b8248..4bbb212 100644
--- a/sentry-hdfs/sentry-hdfs-dist/pom.xml
+++ b/sentry-hdfs/sentry-hdfs-dist/pom.xml
@@ -31,6 +31,10 @@ limitations under the License.
<dependencies>
<dependency>
<groupId>org.apache.sentry</groupId>
+ <artifactId>sentry-provider-db</artifactId>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.sentry</groupId>
<artifactId>sentry-hdfs-common</artifactId>
</dependency>
<dependency>
@@ -45,26 +49,29 @@ limitations under the License.
<build>
<plugins>
+
<plugin>
- <groupId>org.apache.maven.plugins</groupId>
- <artifactId>maven-assembly-plugin</artifactId>
- <version>2.4.1</version>
- <executions>
- <execution>
- <id>assemble</id>
- <phase>package</phase>
- <goals>
- <goal>single</goal>
- </goals>
- <inherited>false</inherited>
- <configuration>
- <finalName>sentry-hdfs</finalName>
- <descriptors>
- <descriptor>src/main/assembly/all-jar.xml</descriptor>
- </descriptors>
- </configuration>
- </execution>
- </executions>
+ <groupId>org.apache.maven.plugins</groupId>
+ <artifactId>maven-shade-plugin</artifactId>
+ <version>2.1</version>
+ <executions>
+ <execution>
+ <phase>package</phase>
+ <goals>
+ <goal>shade</goal>
+ </goals>
+ <configuration>
+ <finalName>sentry-hdfs-${project.version}</finalName>
+ <artifactSet>
+ <includes>
+ <include>org.apache.sentry:sentry-hdfs-common</include>
+ <include>org.apache.sentry:sentry-hdfs-namenode-plugin</include>
+ <include>org.apache.sentry:sentry-provider-db</include>
+ </includes>
+ </artifactSet>
+ </configuration>
+ </execution>
+ </executions>
</plugin>
</plugins>
</build>
http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/e9160ba8/sentry-hdfs/sentry-hdfs-dist/src/main/assembly/all-jar.xml
----------------------------------------------------------------------
diff --git a/sentry-hdfs/sentry-hdfs-dist/src/main/assembly/all-jar.xml
b/sentry-hdfs/sentry-hdfs-dist/src/main/assembly/all-jar.xml
deleted file mode 100644
index 8db709b..0000000
--- a/sentry-hdfs/sentry-hdfs-dist/src/main/assembly/all-jar.xml
+++ /dev/null
@@ -1,18 +0,0 @@
-<assembly
- xmlns="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0"
- xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
-
xsi:schemaLocation="http://maven.apache.org/plugins/maven-assembly-plugin/assembly/1.1.0
http://maven.apache.org/xsd/assembly-1.1.0.xsd">
- <id>${project.version}</id>
- <formats>
- <format>jar</format> <!-- the result is a jar file -->
- </formats>
-
- <includeBaseDirectory>false</includeBaseDirectory> <!-- strip the module
prefixes -->
-
- <dependencySets>
- <dependencySet>
- <unpack>true</unpack> <!-- unpack , then repack the jars -->
- <useTransitiveDependencies>false</useTransitiveDependencies> <!-- do not
pull in any transitive dependencies -->
- </dependencySet>
- </dependencySets>
-</assembly>
http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/e9160ba8/sentry-hdfs/sentry-hdfs-namenode-plugin/src/main/java/org/apache/sentry/hdfs/SentryAuthorizationConstants.java
----------------------------------------------------------------------
diff --git
a/sentry-hdfs/sentry-hdfs-namenode-plugin/src/main/java/org/apache/sentry/hdfs/SentryAuthorizationConstants.java
b/sentry-hdfs/sentry-hdfs-namenode-plugin/src/main/java/org/apache/sentry/hdfs/SentryAuthorizationConstants.java
index 9f219ce..cf33b8b 100644
---
a/sentry-hdfs/sentry-hdfs-namenode-plugin/src/main/java/org/apache/sentry/hdfs/SentryAuthorizationConstants.java
+++
b/sentry-hdfs/sentry-hdfs-namenode-plugin/src/main/java/org/apache/sentry/hdfs/SentryAuthorizationConstants.java
@@ -51,5 +51,5 @@ public class SentryAuthorizationConstants {
public static final String INCLUDE_HDFS_AUTHZ_AS_ACL_KEY = CONFIG_PREFIX +
"include-hdfs-authz-as-acl";
- public static final boolean INCLUDE_HDFS_AUTHZ_AS_ACL_DEFAULT = true;
+ public static final boolean INCLUDE_HDFS_AUTHZ_AS_ACL_DEFAULT = false;
}
http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/e9160ba8/sentry-hdfs/sentry-hdfs-namenode-plugin/src/main/java/org/apache/sentry/hdfs/SentryAuthorizationProvider.java
----------------------------------------------------------------------
diff --git
a/sentry-hdfs/sentry-hdfs-namenode-plugin/src/main/java/org/apache/sentry/hdfs/SentryAuthorizationProvider.java
b/sentry-hdfs/sentry-hdfs-namenode-plugin/src/main/java/org/apache/sentry/hdfs/SentryAuthorizationProvider.java
index cfd5862..7d2940c 100644
---
a/sentry-hdfs/sentry-hdfs-namenode-plugin/src/main/java/org/apache/sentry/hdfs/SentryAuthorizationProvider.java
+++
b/sentry-hdfs/sentry-hdfs-namenode-plugin/src/main/java/org/apache/sentry/hdfs/SentryAuthorizationProvider.java
@@ -315,6 +315,8 @@ public class SentryAuthorizationProvider
}
FsPermission perm = defaultAuthzProvider.getFsPermission(node,
snapshotId);
list.addAll(createAclEntries(user, group, perm));
+ } else {
+ list.addAll(createAclEntries(this.user, this.group, this.permission));
}
if (!authzInfo.isStale()) {
isStale = false;
http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/e9160ba8/sentry-hdfs/sentry-hdfs-namenode-plugin/src/main/java/org/apache/sentry/hdfs/SentryPermissions.java
----------------------------------------------------------------------
diff --git
a/sentry-hdfs/sentry-hdfs-namenode-plugin/src/main/java/org/apache/sentry/hdfs/SentryPermissions.java
b/sentry-hdfs/sentry-hdfs-namenode-plugin/src/main/java/org/apache/sentry/hdfs/SentryPermissions.java
index 7461f89..4b27e7b 100644
---
a/sentry-hdfs/sentry-hdfs-namenode-plugin/src/main/java/org/apache/sentry/hdfs/SentryPermissions.java
+++
b/sentry-hdfs/sentry-hdfs-namenode-plugin/src/main/java/org/apache/sentry/hdfs/SentryPermissions.java
@@ -83,24 +83,81 @@ public class SentryPermissions implements AuthzPermissions {
private final Map<String, PrivilegeInfo> privileges = new HashMap<String, PrivilegeInfo>();
private final Map<String, RoleInfo> roles = new HashMap<String, RoleInfo>();
+ private Map<String, Set<String>> authzObjChildren = new HashMap<String, Set<String>>();
+
+ String getParentAuthzObject(String authzObject) {
+ int dot = authzObject.indexOf('.');
+ if (dot > 0) {
+ return authzObject.substring(0, dot);
+ } else {
+ return authzObject;
+ }
+ }
- @Override
- public List<AclEntry> getAcls(String authzObj) {
- PrivilegeInfo privilegeInfo = privileges.get(authzObj);
+ void addParentChildMappings(String authzObject) {
+ String parent = getParentAuthzObject(authzObject);
+ if (parent != null) {
+ Set<String> children = authzObjChildren.get(parent);
+ if (children == null) {
+ children = new HashSet<String>();
+ authzObjChildren.put(parent, children);
+ }
+ children.add(authzObject);
+ }
+ }
+
+ void removeParentChildMappings(String authzObject) {
+ String parent = getParentAuthzObject(authzObject);
+ if (parent != null) {
+ Set<String> children = authzObjChildren.get(parent);
+ if (children != null) {
+ children.remove(authzObject);
+ }
+ } else {
+ // is parent
+ authzObjChildren.remove(authzObject);
+ }
+ }
+
+ private Map<String, FsAction> getGroupPerms(String authzObj) {
Map<String, FsAction> groupPerms = new HashMap<String, FsAction>();
+ if (authzObj == null) {
+ return groupPerms;
+ }
+ PrivilegeInfo privilegeInfo = privileges.get(authzObj);
if (privilegeInfo != null) {
for (Map.Entry<String, FsAction> privs : privilegeInfo
.getAllPermissions().entrySet()) {
constructAclEntry(privs.getKey(), privs.getValue(), groupPerms);
}
}
+ return groupPerms;
+ }
+
+ @Override
+ public List<AclEntry> getAcls(String authzObj) {
+ Map<String, FsAction> groupPerms = getGroupPerms(authzObj);
+ String parent = getParentAuthzObject(authzObj);
+ Map<String, FsAction> pGroupPerms = null;
+ if (parent == null) {
+ pGroupPerms = new HashMap<String, FsAction>();
+ } else {
+ pGroupPerms = getGroupPerms(getParentAuthzObject(authzObj));
+ if ((groupPerms == null)||(groupPerms.size() == 0)) {
+ groupPerms = pGroupPerms;
+ }
+ }
List<AclEntry> retList = new LinkedList<AclEntry>();
for (Map.Entry<String, FsAction> groupPerm : groupPerms.entrySet()) {
AclEntry.Builder builder = new AclEntry.Builder();
builder.setName(groupPerm.getKey());
builder.setType(AclEntryType.GROUP);
builder.setScope(AclEntryScope.ACCESS);
- FsAction action = groupPerm.getValue();
+ FsAction action = groupPerm.getValue();
+ FsAction pAction = pGroupPerms.get(groupPerm.getKey());
+ if (pAction != null) {
+ action.or(pAction);
+ }
if ((action == FsAction.READ) || (action == FsAction.WRITE)
|| (action == FsAction.READ_WRITE)) {
action = action.or(FsAction.EXECUTE);
@@ -145,6 +202,10 @@ public class SentryPermissions implements AuthzPermissions
{
privileges.put(privilegeInfo.authzObj, privilegeInfo);
}
+ public Set<String> getChildren(String authzObj) {
+ return authzObjChildren.get(authzObj);
+ }
+
public RoleInfo getRoleInfo(String role) {
return roles.get(role);
}
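
The propagation of Db privileges to table directories hinges on getParentAuthzObject() above: a "db.table" name maps to its db, a bare db name maps to itself, and getAcls() falls back to the parent's permissions when the table has none of its own. A small sketch mirroring that helper (hypothetical standalone class, not code from this patch):

    // Mirrors the naming convention used by getParentAuthzObject() above.
    class AuthzNames {
      static String parentOf(String authzObject) {
        int dot = authzObject.indexOf('.');
        // "db1.tbl1" -> "db1"; a bare "db1" is returned unchanged,
        // i.e. a database acts as its own parent.
        return (dot > 0) ? authzObject.substring(0, dot) : authzObject;
      }

      public static void main(String[] args) {
        System.out.println(parentOf("db1.tbl1")); // db1
        System.out.println(parentOf("db1"));      // db1
      }
    }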
http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/e9160ba8/sentry-hdfs/sentry-hdfs-namenode-plugin/src/main/java/org/apache/sentry/hdfs/UpdateableAuthzPermissions.java
----------------------------------------------------------------------
diff --git
a/sentry-hdfs/sentry-hdfs-namenode-plugin/src/main/java/org/apache/sentry/hdfs/UpdateableAuthzPermissions.java
b/sentry-hdfs/sentry-hdfs-namenode-plugin/src/main/java/org/apache/sentry/hdfs/UpdateableAuthzPermissions.java
index 476e9c4..e4ff1d5 100644
---
a/sentry-hdfs/sentry-hdfs-namenode-plugin/src/main/java/org/apache/sentry/hdfs/UpdateableAuthzPermissions.java
+++
b/sentry-hdfs/sentry-hdfs-namenode-plugin/src/main/java/org/apache/sentry/hdfs/UpdateableAuthzPermissions.java
@@ -17,10 +17,11 @@
*/
package org.apache.sentry.hdfs;
-import java.util.Collection;
import java.util.HashMap;
+import java.util.LinkedList;
import java.util.List;
import java.util.Map;
+import java.util.Set;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReadWriteLock;
@@ -75,7 +76,7 @@ public class UpdateableAuthzPermissions implements
AuthzPermissions, Updateable<
lock.writeLock().lock();
}
seqNum.set(update.getSeqNum());
- LOG.warn("##### Updated perms seq Num [" + seqNum.get() + "]");
+ LOG.debug("##### Updated perms seq Num [" + seqNum.get() + "]");
}
} finally {
lock.writeLock().unlock();
@@ -127,14 +128,16 @@ public class UpdateableAuthzPermissions implements
AuthzPermissions, Updateable<
PrivilegeInfo privilegeInfo = perms.getPrivilegeInfo(oldAuthzObj);
Map<String, FsAction> allPermissions =
privilegeInfo.getAllPermissions();
perms.delPrivilegeInfo(oldAuthzObj);
+ perms.removeParentChildMappings(oldAuthzObj);
PrivilegeInfo newPrivilegeInfo = new PrivilegeInfo(newAuthzObj);
for (Map.Entry<String, FsAction> e : allPermissions.entrySet()) {
newPrivilegeInfo.setPermission(e.getKey(), e.getValue());
}
perms.addPrivilegeInfo(newPrivilegeInfo);
+ perms.addParentChildMappings(newAuthzObj);
return;
}
- if (pUpdate.getAuthzObj().equals(PermissionsUpdate.ALL_PRIVS)) {
+ if (pUpdate.getAuthzObj().equals(PermissionsUpdate.ALL_AUTHZ_OBJ)) {
// Request to remove role from all Privileges
String roleToRemove = pUpdate.getDelPrivileges().keySet().iterator()
.next();
@@ -157,19 +160,32 @@ public class UpdateableAuthzPermissions implements
AuthzPermissions, Updateable<
}
if (pInfo != null) {
perms.addPrivilegeInfo(pInfo);
+ perms.addParentChildMappings(pUpdate.getAuthzObj());
for (Map.Entry<String, String> dMap :
pUpdate.getDelPrivileges().entrySet()) {
if (dMap.getKey().equals(PermissionsUpdate.ALL_ROLES)) {
// Remove all privileges
perms.delPrivilegeInfo(pUpdate.getAuthzObj());
+ perms.removeParentChildMappings(pUpdate.getAuthzObj());
break;
}
- FsAction fsAction = pInfo.getPermission(dMap.getKey());
- if (fsAction != null) {
- fsAction = fsAction.and(getFAction(dMap.getValue()).not());
- if (FsAction.NONE == fsAction) {
- pInfo.removePermission(dMap.getKey());
- } else {
- pInfo.setPermission(dMap.getKey(), fsAction);
+ List<PrivilegeInfo> parentAndChild = new LinkedList<PrivilegeInfo>();
+ parentAndChild.add(pInfo);
+ Set<String> children = perms.getChildren(pInfo.getAuthzObj());
+ if (children != null) {
+ for (String child : children) {
+ parentAndChild.add(perms.getPrivilegeInfo(child));
+ }
+ }
+ // recursive revoke
+ for (PrivilegeInfo pInfo2 : parentAndChild) {
+ FsAction fsAction = pInfo2.getPermission(dMap.getKey());
+ if (fsAction != null) {
+ fsAction = fsAction.and(getFAction(dMap.getValue()).not());
+ if (FsAction.NONE == fsAction) {
+ pInfo2.removePermission(dMap.getKey());
+ } else {
+ pInfo2.setPermission(dMap.getKey(), fsAction);
+ }
}
}
}
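
The loop tagged "recursive revoke" above applies the same bitmask arithmetic to the parent authz object and to every child returned by perms.getChildren(), which is how a revoke on a Db now reaches its table directories. A quick illustration of that masking with assumed example values (the class name RevokeMath is hypothetical):

    import org.apache.hadoop.fs.permission.FsAction;

    class RevokeMath {
      public static void main(String[] args) {
        FsAction current = FsAction.READ_WRITE; // group currently holds rw-
        FsAction revoked = FsAction.WRITE;      // action being revoked
        FsAction remaining = current.and(revoked.not());
        System.out.println(remaining);          // READ
        // A result of FsAction.NONE removes the group entry entirely,
        // as pInfo2.removePermission() does in the patch above.
      }
    }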
http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/e9160ba8/sentry-hdfs/sentry-hdfs-namenode-plugin/src/test/java/org/apache/sentry/hdfs/TestSentryAuthorizationProvider.java
----------------------------------------------------------------------
diff --git
a/sentry-hdfs/sentry-hdfs-namenode-plugin/src/test/java/org/apache/sentry/hdfs/TestSentryAuthorizationProvider.java
b/sentry-hdfs/sentry-hdfs-namenode-plugin/src/test/java/org/apache/sentry/hdfs/TestSentryAuthorizationProvider.java
index 67919fa..b766a8f 100644
---
a/sentry-hdfs/sentry-hdfs-namenode-plugin/src/test/java/org/apache/sentry/hdfs/TestSentryAuthorizationProvider.java
+++
b/sentry-hdfs/sentry-hdfs-namenode-plugin/src/test/java/org/apache/sentry/hdfs/TestSentryAuthorizationProvider.java
@@ -56,6 +56,7 @@ public class TestSentryAuthorizationProvider {
public Void run() throws Exception {
System.setProperty(MiniDFSCluster.PROP_TEST_BUILD_DATA,
"target/test/data");
Configuration conf = new HdfsConfiguration();
+
conf.setBoolean("sentry.authorization-provider.include-hdfs-authz-as-acl",
true);
conf.set(DFSConfigKeys.DFS_NAMENODE_AUTHORIZATION_PROVIDER_KEY,
MockSentryAuthorizationProvider.class.getName());
conf.setBoolean(DFSConfigKeys.DFS_NAMENODE_ACLS_ENABLED_KEY, true);
http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/e9160ba8/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/MetastorePlugin.java
----------------------------------------------------------------------
diff --git
a/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/MetastorePlugin.java
b/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/MetastorePlugin.java
index 08e1319..9a81e3a 100644
---
a/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/MetastorePlugin.java
+++
b/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/MetastorePlugin.java
@@ -24,14 +24,19 @@ import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.concurrent.locks.Lock;
+import java.util.concurrent.locks.ReentrantLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.HiveMetaStore;
+import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
import org.apache.hadoop.hive.metastore.IHMSHandler;
import org.apache.hadoop.hive.metastore.MetaStorePreEventListener;
import org.apache.hadoop.hive.metastore.api.Database;
+import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.api.Partition;
import org.apache.hadoop.hive.metastore.api.Table;
import org.apache.sentry.hdfs.ServiceConstants.ServerConfig;
@@ -54,20 +59,33 @@ public class MetastorePlugin extends
SentryMetastoreListenerPlugin {
private final Configuration conf;
private SentryHDFSServiceClient sentryClient;
private UpdateableAuthzPaths authzPaths;
+ private Lock notificiationLock;
//Initialized to some value > 1 so that the first update notification
// will trigger a full Image fetch
- private final AtomicInteger seqNum = new AtomicInteger(5);
+ private final AtomicLong seqNum = new AtomicLong(5);
+ private volatile long lastSentSeqNum = -1;
private final ExecutorService threadPool;
+ static class ProxyHMSHandler extends HMSHandler {
+ public ProxyHMSHandler(String name, HiveConf conf) throws MetaException {
+ super(name, conf);
+ }
+ @Override
+ public String startFunction(String function, String extraLogInfo) {
+ return function;
+ }
+ }
+
public MetastorePlugin(Configuration conf) {
+ this.notificiationLock = new ReentrantLock();
this.conf = new HiveConf((HiveConf)conf);
this.conf.unset(HiveConf.ConfVars.METASTORE_PRE_EVENT_LISTENERS.varname);
this.conf.unset(HiveConf.ConfVars.METASTORE_EVENT_LISTENERS.varname);
this.conf.unset(HiveConf.ConfVars.METASTORE_END_FUNCTION_LISTENERS.varname);
this.conf.unset(HiveConf.ConfVars.METASTOREURIS.varname);
try {
- this.authzPaths = createInitialUpdate(HiveMetaStore.newHMSHandler("sentry.hdfs", (HiveConf)this.conf));
+ this.authzPaths = createInitialUpdate(new ProxyHMSHandler("sentry.hdfs", (HiveConf)this.conf));
} catch (Exception e1) {
LOGGER.error("Could not create Initial AuthzPaths or HMSHandler !!", e1);
throw new RuntimeException(e1);
@@ -82,20 +100,23 @@ public class MetastorePlugin extends
SentryMetastoreListenerPlugin {
threadPool.scheduleWithFixedDelay(new Runnable() {
@Override
public void run() {
+ notificiationLock.lock();
try {
long lastSeenHMSPathSeqNum =
MetastorePlugin.this.getClient().getLastSeenHMSPathSeqNum();
- if (lastSeenHMSPathSeqNum != seqNum.get()) {
- LOGGER.warn("Sentry not in sync with HMS [" +
lastSeenHMSPathSeqNum + ", " + seqNum.get() + "]");
+ if (lastSeenHMSPathSeqNum != lastSentSeqNum) {
+ LOGGER.warn("Sentry not in sync with HMS [" +
lastSeenHMSPathSeqNum + ", " + lastSentSeqNum + "]");
PathsUpdate fullImageUpdate =
MetastorePlugin.this.authzPaths.createFullImageUpdate(
- seqNum.get());
+ lastSentSeqNum);
LOGGER.warn("Sentry not in sync with HMS !!");
- notifySentry(fullImageUpdate);
+ notifySentryNoLock(fullImageUpdate, false);
}
} catch (Exception e) {
sentryClient = null;
LOGGER.error("Error talking to Sentry HDFS Service !!", e);
+ } finally {
+ notificiationLock.unlock();
}
}
}, this.conf.getLong(ServerConfig.SENTRY_HDFS_INIT_UPDATE_RETRY_DELAY_MS,
@@ -135,32 +156,61 @@ public class MetastorePlugin extends
SentryMetastoreListenerPlugin {
@Override
public void addPath(String authzObj, String path) {
+ LOGGER.debug("#### HMS Path Update ["
+ + "OP : addPath, "
+ + "authzObj : " + authzObj + ", "
+ + "path : " + path + "]");
PathsUpdate update = createHMSUpdate();
update.newPathChange(authzObj).addToAddPaths(PathsUpdate.cleanPath(path));
- notifySentry(update);
+ notifySentry(update, true);
}
@Override
- public void removeAllPaths(String authzObj) {
+ public void removeAllPaths(String authzObj, List<String> childObjects) {
+ LOGGER.debug("#### HMS Path Update ["
+ + "OP : removeAllPaths, "
+ + "authzObj : " + authzObj + ", "
+ + "childObjs : " + (childObjects == null ? "[]" : childObjects) + "]");
PathsUpdate update = createHMSUpdate();
- update.newPathChange(authzObj).addToDelPaths(Lists.newArrayList(PathsUpdate.ALL_PATHS));
- notifySentry(update);
+ if (childObjects != null) {
+ for (String childObj : childObjects) {
+ update.newPathChange(authzObj + "." + childObj).addToDelPaths(
+ Lists.newArrayList(PathsUpdate.ALL_PATHS));
+ }
+ }
+ update.newPathChange(authzObj).addToDelPaths(
+ Lists.newArrayList(PathsUpdate.ALL_PATHS));
+ notifySentry(update, true);
}
@Override
public void removePath(String authzObj, String path) {
- PathsUpdate update = createHMSUpdate();
- update.newPathChange(authzObj).addToDelPaths(PathsUpdate.cleanPath(path));
- notifySentry(update);
+ if ("*".equals(path)) {
+ removeAllPaths(authzObj, null);
+ } else {
+ LOGGER.debug("#### HMS Path Update ["
+ + "OP : removePath, "
+ + "authzObj : " + authzObj + ", "
+ + "path : " + path + "]");
+ PathsUpdate update = createHMSUpdate();
+ update.newPathChange(authzObj).addToDelPaths(PathsUpdate.cleanPath(path));
+ notifySentry(update, true);
+ }
}
@Override
public void renameAuthzObject(String oldName, String oldPath, String newName,
String newPath) {
PathsUpdate update = createHMSUpdate();
+ LOGGER.debug("#### HMS Path Update ["
+ + "OP : renameAuthzObject, "
+ + "oldName : " + oldName + ","
+ + "newPath : " + oldPath + ","
+ + "newName : " + newName + ","
+ + "newPath : " + newPath + "]");
update.newPathChange(newName).addToAddPaths(PathsUpdate.cleanPath(newPath));
update.newPathChange(oldName).addToDelPaths(PathsUpdate.cleanPath(oldPath));
- notifySentry(update);
+ notifySentry(update, true);
}
private SentryHDFSServiceClient getClient() {
@@ -177,15 +227,30 @@ public class MetastorePlugin extends
SentryMetastoreListenerPlugin {
private PathsUpdate createHMSUpdate() {
PathsUpdate update = new PathsUpdate(seqNum.incrementAndGet(), false);
+ LOGGER.debug("#### HMS Path Update SeqNum : [" + seqNum.get() + "]");
return update;
}
- private void notifySentry(PathsUpdate update) {
- authzPaths.updatePartial(Lists.newArrayList(update), new ReentrantReadWriteLock());
+ private void notifySentryNoLock(PathsUpdate update, boolean applyLocal) {
+ if (applyLocal) {
+ authzPaths.updatePartial(Lists.newArrayList(update), new ReentrantReadWriteLock());
+ }
try {
getClient().notifyHMSUpdate(update);
} catch (Exception e) {
LOGGER.error("Could not send update to Sentry HDFS Service !!", e);
+ } finally {
+ lastSentSeqNum = update.getSeqNum();
+ LOGGER.debug("#### HMS Path Last update sent : [" + lastSentSeqNum +
"]");
+ }
+ }
+
+ private void notifySentry(PathsUpdate update, boolean applyLocal) {
+ notificiationLock.lock();
+ try {
+ notifySentryNoLock(update, applyLocal);
+ } finally {
+ notificiationLock.unlock();
}
}
http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/e9160ba8/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/SentryHDFSServiceProcessor.java
----------------------------------------------------------------------
diff --git
a/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/SentryHDFSServiceProcessor.java
b/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/SentryHDFSServiceProcessor.java
index fd33d29..5fe89a8 100644
---
a/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/SentryHDFSServiceProcessor.java
+++
b/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/SentryHDFSServiceProcessor.java
@@ -75,7 +75,7 @@ public class SentryHDFSServiceProcessor implements
SentryHDFSService.Iface {
PathsUpdate hmsUpdate = new PathsUpdate(update);
if (SentryPlugin.instance != null) {
SentryPlugin.instance.handlePathUpdateNotification(hmsUpdate);
- LOGGER.info("Authz Paths update [" + hmsUpdate.getSeqNum() + "]..");
+ LOGGER.debug("Authz Paths update [" + hmsUpdate.getSeqNum() + "]..");
} else {
LOGGER.error("SentryPlugin not initialized yet !!");
}
http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/e9160ba8/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/SentryHDFSServiceProcessorFactory.java
----------------------------------------------------------------------
diff --git
a/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/SentryHDFSServiceProcessorFactory.java
b/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/SentryHDFSServiceProcessorFactory.java
index c45c294..d35de75 100644
---
a/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/SentryHDFSServiceProcessorFactory.java
+++
b/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/SentryHDFSServiceProcessorFactory.java
@@ -80,9 +80,13 @@ public class SentryHDFSServiceProcessorFactory extends
ProcessorFactory{
transport = ((TSaslServerTransport)
transport).getUnderlyingTransport();
} else if (transport instanceof TSaslClientTransport) {
transport = ((TSaslClientTransport)
transport).getUnderlyingTransport();
- } else if (transport instanceof TSocket) {
- return (TSocket) transport;
+ } else {
+ if (!(transport instanceof TSocket)) {
+ LOGGER.warn("Transport class [" + transport.getClass().getName() +
"] is not of type TSocket");
+ return null;
+ }
}
+ return (TSocket) transport;
}
return null;
}
http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/e9160ba8/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/SentryPlugin.java
----------------------------------------------------------------------
diff --git
a/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/SentryPlugin.java
b/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/SentryPlugin.java
index 40e952f..55b7697 100644
---
a/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/SentryPlugin.java
+++
b/sentry-hdfs/sentry-hdfs-service/src/main/java/org/apache/sentry/hdfs/SentryPlugin.java
@@ -27,15 +27,18 @@ import java.util.concurrent.atomic.AtomicLong;
import org.apache.hadoop.conf.Configuration;
import org.apache.sentry.hdfs.ServiceConstants.ServerConfig;
import org.apache.sentry.hdfs.UpdateForwarder.ExternalImageRetriever;
+import org.apache.sentry.hdfs.service.thrift.TPathChanges;
import org.apache.sentry.hdfs.service.thrift.TPermissionsUpdate;
import org.apache.sentry.hdfs.service.thrift.TPrivilegeChanges;
import org.apache.sentry.hdfs.service.thrift.TRoleChanges;
import org.apache.sentry.provider.db.SentryPolicyStorePlugin;
+import org.apache.sentry.provider.db.SentryPolicyStorePlugin.SentryPluginException;
import org.apache.sentry.provider.db.service.persistent.SentryStore;
import org.apache.sentry.provider.db.service.thrift.TAlterSentryRoleAddGroupsRequest;
import org.apache.sentry.provider.db.service.thrift.TAlterSentryRoleDeleteGroupsRequest;
import org.apache.sentry.provider.db.service.thrift.TAlterSentryRoleGrantPrivilegeRequest;
import org.apache.sentry.provider.db.service.thrift.TAlterSentryRoleRevokePrivilegeRequest;
+import org.apache.sentry.provider.db.service.thrift.TDropPrivilegesRequest;
import org.apache.sentry.provider.db.service.thrift.TDropSentryRoleRequest;
import org.apache.sentry.provider.db.service.thrift.TRenamePrivilegesRequest;
import org.apache.sentry.provider.db.service.thrift.TSentryAuthorizable;
@@ -44,6 +47,8 @@ import
org.apache.sentry.provider.db.service.thrift.TSentryPrivilege;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import com.google.common.base.Strings;
+
public class SentryPlugin implements SentryPolicyStorePlugin {
private static final Logger LOGGER =
LoggerFactory.getLogger(SentryPlugin.class);
@@ -87,6 +92,7 @@ public class SentryPlugin implements SentryPolicyStorePlugin {
private UpdateForwarder<PathsUpdate> pathsUpdater;
private UpdateForwarder<PermissionsUpdate> permsUpdater;
private final AtomicLong permSeqNum = new AtomicLong(5);
+ private PermImageRetriever permImageRetriever;
long getLastSeenHMSPathSeqNum() {
return pathsUpdater.getLastSeen();
@@ -102,10 +108,11 @@ public class SentryPlugin implements
SentryPolicyStorePlugin {
ServerConfig.SENTRY_HDFS_INIT_UPDATE_RETRY_DELAY_DEFAULT);
pathsUpdater = new UpdateForwarder<PathsUpdate>(new UpdateableAuthzPaths(
pathPrefixes), null, 100, initUpdateRetryDelayMs);
- PermImageRetriever permImageRetriever = new PermImageRetriever(sentryStore);
+ permImageRetriever = new PermImageRetriever(sentryStore);
permsUpdater = new UpdateForwarder<PermissionsUpdate>(
new UpdateablePermissions(permImageRetriever), permImageRetriever,
100, initUpdateRetryDelayMs);
+ LOGGER.info("Sentry HDFS plugin initialized !!");
instance = this;
}
@@ -119,7 +126,7 @@ public class SentryPlugin implements
SentryPolicyStorePlugin {
public void handlePathUpdateNotification(PathsUpdate update) {
pathsUpdater.handleUpdateNotification(update);
- LOGGER.info("Recieved Authz Path update [" + update.getSeqNum() + "]..");
+ LOGGER.debug("Recieved Authz Path update [" + update.getSeqNum() + "]..");
}
@Override
@@ -131,7 +138,7 @@ public class SentryPlugin implements
SentryPolicyStorePlugin {
rUpdate.addToAddGroups(group.getGroupName());
}
permsUpdater.handleUpdateNotification(update);
- LOGGER.info("Authz Perm preUpdate [" + update.getSeqNum() + ", " +
request.getRoleName() + "]..");
+ LOGGER.debug("Authz Perm preUpdate [" + update.getSeqNum() + ", " +
request.getRoleName() + "]..");
}
@Override
@@ -144,7 +151,7 @@ public class SentryPlugin implements
SentryPolicyStorePlugin {
rUpdate.addToDelGroups(group.getGroupName());
}
permsUpdater.handleUpdateNotification(update);
- LOGGER.info("Authz Perm preUpdate [" + update.getSeqNum() + ", " +
request.getRoleName() + "]..");
+ LOGGER.debug("Authz Perm preUpdate [" + update.getSeqNum() + ", " +
request.getRoleName() + "]..");
}
@Override
@@ -157,7 +164,7 @@ public class SentryPlugin implements
SentryPolicyStorePlugin {
update.addPrivilegeUpdate(authzObj).putToAddPrivileges(
request.getRoleName(),
request.getPrivilege().getAction().toUpperCase());
permsUpdater.handleUpdateNotification(update);
- LOGGER.info("Authz Perm preUpdate [" + update.getSeqNum() + "]..");
+ LOGGER.debug("Authz Perm preUpdate [" + update.getSeqNum() + "]..");
}
}
@@ -171,7 +178,7 @@ public class SentryPlugin implements
SentryPolicyStorePlugin {
privUpdate.putToAddPrivileges(newAuthz, newAuthz);
privUpdate.putToDelPrivileges(oldAuthz, oldAuthz);
permsUpdater.handleUpdateNotification(update);
- LOGGER.info("Authz Perm preUpdate [" + update.getSeqNum() + ", " +
newAuthz + ", " + oldAuthz + "]..");
+ LOGGER.debug("Authz Perm preUpdate [" + update.getSeqNum() + ", " +
newAuthz + ", " + oldAuthz + "]..");
}
@Override
@@ -184,7 +191,7 @@ public class SentryPlugin implements
SentryPolicyStorePlugin {
update.addPrivilegeUpdate(authzObj).putToDelPrivileges(
request.getRoleName(),
request.getPrivilege().getAction().toUpperCase());
permsUpdater.handleUpdateNotification(update);
- LOGGER.info("Authz Perm preUpdate [" + update.getSeqNum() + ", " +
authzObj + "]..");
+ LOGGER.debug("Authz Perm preUpdate [" + update.getSeqNum() + ", " +
authzObj + "]..");
}
}
@@ -196,7 +203,18 @@ public class SentryPlugin implements
SentryPolicyStorePlugin {
request.getRoleName(), PermissionsUpdate.ALL_AUTHZ_OBJ);
update.addRoleUpdate(request.getRoleName()).addToDelGroups(PermissionsUpdate.ALL_GROUPS);
permsUpdater.handleUpdateNotification(update);
- LOGGER.info("Authz Perm preUpdate [" + update.getSeqNum() + ", " +
request.getRoleName() + "]..");
+ LOGGER.debug("Authz Perm preUpdate [" + update.getSeqNum() + ", " +
request.getRoleName() + "]..");
+ }
+
+ @Override
+ public void onDropSentryPrivilege(TDropPrivilegesRequest request)
+ throws SentryPluginException {
+ PermissionsUpdate update = new PermissionsUpdate(permSeqNum.incrementAndGet(), false);
+ String authzObj = getAuthzObj(request.getAuthorizable());
+ update.addPrivilegeUpdate(authzObj).putToDelPrivileges(
+ PermissionsUpdate.ALL_ROLES, PermissionsUpdate.ALL_ROLES);
+ permsUpdater.handleUpdateNotification(update);
+ LOGGER.debug("Authz Perm preUpdate [" + update.getSeqNum() + ", " + authzObj + "]..");
}
private String getAuthzObj(TSentryPrivilege privilege) {
@@ -204,7 +222,7 @@ public class SentryPlugin implements
SentryPolicyStorePlugin {
if (!SentryStore.isNULL(privilege.getDbName())) {
String dbName = privilege.getDbName();
String tblName = privilege.getTableName();
- if (tblName == null) {
+ if (SentryStore.isNULL(tblName)) {
authzObj = dbName;
} else {
authzObj = dbName + "." + tblName;
@@ -218,7 +236,7 @@ public class SentryPlugin implements
SentryPolicyStorePlugin {
if (!SentryStore.isNULL(authzble.getDb())) {
String dbName = authzble.getDb();
String tblName = authzble.getTable();
- if (tblName == null) {
+ if (SentryStore.isNULL(tblName)) {
authzObj = dbName;
} else {
authzObj = dbName + "." + tblName;
http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/e9160ba8/sentry-provider/sentry-provider-db/src/main/java/org/apache/sentry/provider/db/SentryMetastoreListenerPlugin.java
----------------------------------------------------------------------
diff --git
a/sentry-provider/sentry-provider-db/src/main/java/org/apache/sentry/provider/db/SentryMetastoreListenerPlugin.java
b/sentry-provider/sentry-provider-db/src/main/java/org/apache/sentry/provider/db/SentryMetastoreListenerPlugin.java
index 7c67cd0..79cf4a4 100644
---
a/sentry-provider/sentry-provider-db/src/main/java/org/apache/sentry/provider/db/SentryMetastoreListenerPlugin.java
+++
b/sentry-provider/sentry-provider-db/src/main/java/org/apache/sentry/provider/db/SentryMetastoreListenerPlugin.java
@@ -43,6 +43,6 @@ public abstract class SentryMetastoreListenerPlugin {
public abstract void removePath(String authzObj, String path);
- public abstract void removeAllPaths(String authzObj);
+ public abstract void removeAllPaths(String authzObj, List<String> childObjects);
}
http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/e9160ba8/sentry-provider/sentry-provider-db/src/main/java/org/apache/sentry/provider/db/SentryPolicyStorePlugin.java
----------------------------------------------------------------------
diff --git
a/sentry-provider/sentry-provider-db/src/main/java/org/apache/sentry/provider/db/SentryPolicyStorePlugin.java
b/sentry-provider/sentry-provider-db/src/main/java/org/apache/sentry/provider/db/SentryPolicyStorePlugin.java
index 7f4f2ca..998a48b 100644
---
a/sentry-provider/sentry-provider-db/src/main/java/org/apache/sentry/provider/db/SentryPolicyStorePlugin.java
+++
b/sentry-provider/sentry-provider-db/src/main/java/org/apache/sentry/provider/db/SentryPolicyStorePlugin.java
@@ -25,6 +25,7 @@ import
org.apache.sentry.provider.db.service.thrift.TAlterSentryRoleAddGroupsReq
import
org.apache.sentry.provider.db.service.thrift.TAlterSentryRoleDeleteGroupsRequest;
import
org.apache.sentry.provider.db.service.thrift.TAlterSentryRoleGrantPrivilegeRequest;
import
org.apache.sentry.provider.db.service.thrift.TAlterSentryRoleRevokePrivilegeRequest;
+import org.apache.sentry.provider.db.service.thrift.TDropPrivilegesRequest;
import org.apache.sentry.provider.db.service.thrift.TDropSentryRoleRequest;
import org.apache.sentry.provider.db.service.thrift.TRenamePrivilegesRequest;
@@ -54,4 +55,6 @@ public interface SentryPolicyStorePlugin {
public void onRenameSentryPrivilege(TRenamePrivilegesRequest request) throws
SentryPluginException;
+ public void onDropSentryPrivilege(TDropPrivilegesRequest request) throws SentryPluginException;
+
}
http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/e9160ba8/sentry-provider/sentry-provider-db/src/main/java/org/apache/sentry/provider/db/service/thrift/SentryPolicyServiceClient.java
----------------------------------------------------------------------
diff --git
a/sentry-provider/sentry-provider-db/src/main/java/org/apache/sentry/provider/db/service/thrift/SentryPolicyServiceClient.java
b/sentry-provider/sentry-provider-db/src/main/java/org/apache/sentry/provider/db/service/thrift/SentryPolicyServiceClient.java
index 2dc724e..65905f5 100644
---
a/sentry-provider/sentry-provider-db/src/main/java/org/apache/sentry/provider/db/service/thrift/SentryPolicyServiceClient.java
+++
b/sentry-provider/sentry-provider-db/src/main/java/org/apache/sentry/provider/db/service/thrift/SentryPolicyServiceClient.java
@@ -44,7 +44,7 @@ import
org.apache.sentry.service.thrift.ServiceConstants.ServerConfig;
import org.apache.sentry.service.thrift.ServiceConstants.ThriftConstants;
import org.apache.sentry.service.thrift.Status;
import org.apache.thrift.TException;
-import org.apache.thrift.protocol.TCompactProtocol;
+import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.protocol.TMultiplexedProtocol;
import org.apache.thrift.transport.TSaslClientTransport;
import org.apache.thrift.transport.TSocket;
@@ -157,7 +157,7 @@ public class SentryPolicyServiceClient {
}
LOGGER.debug("Successfully opened transport: " + transport + " to " +
serverAddress);
TMultiplexedProtocol protocol = new TMultiplexedProtocol(
- new TCompactProtocol(transport),
+ new TBinaryProtocol(transport),
SentryPolicyStoreProcessor.SENTRY_POLICY_SERVICE_NAME);
client = new SentryPolicyService.Client(protocol);
LOGGER.debug("Successfully created client");
http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/e9160ba8/sentry-provider/sentry-provider-db/src/main/java/org/apache/sentry/provider/db/service/thrift/SentryPolicyStoreProcessor.java
----------------------------------------------------------------------
diff --git
a/sentry-provider/sentry-provider-db/src/main/java/org/apache/sentry/provider/db/service/thrift/SentryPolicyStoreProcessor.java
b/sentry-provider/sentry-provider-db/src/main/java/org/apache/sentry/provider/db/service/thrift/SentryPolicyStoreProcessor.java
index 8fd7197..803ea14 100644
---
a/sentry-provider/sentry-provider-db/src/main/java/org/apache/sentry/provider/db/service/thrift/SentryPolicyStoreProcessor.java
+++
b/sentry-provider/sentry-provider-db/src/main/java/org/apache/sentry/provider/db/service/thrift/SentryPolicyStoreProcessor.java
@@ -266,10 +266,10 @@ public class SentryPolicyStoreProcessor implements
SentryPolicyService.Iface {
}
} catch (SentryNoSuchObjectException e) {
String msg = "Privilege: [server=" +
request.getPrivilege().getServerName() +
- ",db=" + request.getPrivilege().getDbName() +
- ",table=" + request.getPrivilege().getTableName() +
- ",URI=" + request.getPrivilege().getURI() +
- ",action=" + request.getPrivilege().getAction() + "] doesn't
exist.";
+ ",db=" + request.getPrivilege().getDbName() +
+ ",table=" + request.getPrivilege().getTableName() +
+ ",URI=" + request.getPrivilege().getURI() +
+ ",action=" + request.getPrivilege().getAction() + "] doesn't
exist.";
LOGGER.error(msg, e);
response.setStatus(Status.NoSuchObject(msg, e));
} catch (SentryInvalidInputException e) {
@@ -554,6 +554,9 @@ public class SentryPolicyStoreProcessor implements
SentryPolicyService.Iface {
try {
authorize(request.getRequestorUserName(), adminGroups);
sentryStore.dropPrivilege(request.getAuthorizable());
+ for (SentryPolicyStorePlugin plugin : sentryPlugins) {
+ plugin.onDropSentryPrivilege(request);
+ }
response.setStatus(Status.OK());
} catch (SentryAccessDeniedException e) {
LOGGER.error(e.getMessage(), e);
http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/e9160ba8/sentry-provider/sentry-provider-db/src/main/java/org/apache/sentry/service/thrift/SentryService.java
----------------------------------------------------------------------
diff --git
a/sentry-provider/sentry-provider-db/src/main/java/org/apache/sentry/service/thrift/SentryService.java
b/sentry-provider/sentry-provider-db/src/main/java/org/apache/sentry/service/thrift/SentryService.java
index 4b3ff94..bb7d989 100644
---
a/sentry-provider/sentry-provider-db/src/main/java/org/apache/sentry/service/thrift/SentryService.java
+++
b/sentry-provider/sentry-provider-db/src/main/java/org/apache/sentry/service/thrift/SentryService.java
@@ -207,7 +207,7 @@ public class SentryService implements Callable {
TThreadPoolServer.Args args = new TThreadPoolServer.Args(
serverTransport).processor(processor)
.transportFactory(transportFactory)
- .protocolFactory(new TCompactProtocol.Factory())
+ .protocolFactory(new TBinaryProtocol.Factory())
.minWorkerThreads(minThreads).maxWorkerThreads(maxThreads);
thriftServer = new TThreadPoolServer(args);
LOGGER.info("Serving on " + address);
http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/e9160ba8/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegration.java
----------------------------------------------------------------------
diff --git
a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegration.java
b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegration.java
index 5d1a92c..9e87158 100644
---
a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegration.java
+++
b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/hdfs/TestHDFSIntegration.java
@@ -42,6 +42,7 @@ import junit.framework.Assert;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FsStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.AclEntry;
import org.apache.hadoop.fs.permission.AclEntryType;
@@ -98,26 +99,6 @@ import com.google.common.io.Resources;
public class TestHDFSIntegration {
- // mock user group mapping that maps user to same group
- public static class PseudoGroupMappingService implements
- GroupMappingServiceProvider {
-
- @Override
- public List<String> getGroups(String user) {
- return Lists.newArrayList(user, System.getProperty("user.name"));
- }
-
- @Override
- public void cacheGroupsRefresh() throws IOException {
- // no-op
- }
-
- @Override
- public void cacheGroupsAdd(List<String> groups) throws IOException {
- // no-op
- }
- }
-
public static class WordCountMapper extends MapReduceBase implements
Mapper<LongWritable, Text, String, Long> {
@@ -222,6 +203,8 @@ public class TestHDFSIntegration {
hiveConf.set("sentry.hdfs.service.client.server.rpc-address",
"localhost");
hiveConf.set("sentry.hdfs.service.client.server.rpc-port",
String.valueOf(sentryPort));
hiveConf.set("sentry.service.client.server.rpc-port",
String.valueOf(sentryPort));
+// hiveConf.set("sentry.service.server.compact.transport", "true");
+// hiveConf.set("sentry.service.client.compact.transport", "true");
hiveConf.set("sentry.service.security.mode", "none");
hiveConf.set("sentry.hdfs.service.security.mode", "none");
hiveConf.set("sentry.hdfs.init.update.retry.delay.ms", "500");
@@ -279,11 +262,32 @@ public class TestHDFSIntegration {
.set(hiveSite.toURI().toURL());
metastore = new InternalMetastoreServer(hiveConf);
- metastore.start();
+ new Thread() {
+ @Override
+ public void run() {
+ try {
+ metastore.start();
+ while(true){}
+ } catch (Exception e) {
+            System.out.println("Could not start Hive Metastore");
+ }
+ }
+ }.start();
hiveServer2 = new InternalHiveServer(hiveConf);
- hiveServer2.start();
-
+ new Thread() {
+ @Override
+ public void run() {
+ try {
+ hiveServer2.start();
+ while(true){}
+ } catch (Exception e) {
+ System.out.println("Could not start Hive Server");
+ }
+ }
+ }.start();
+
+ Thread.sleep(10000);
return null;
}
});
@@ -306,7 +310,7 @@ public class TestHDFSIntegration {
MiniDFS.PseudoGroupMappingService.class.getName());
Configuration.addDefaultResource("test.xml");
- conf.set("sentry.authorization-provider.hdfs-path-prefixes",
"/user/hive/warehouse");
+ conf.set("sentry.authorization-provider.hdfs-path-prefixes",
"/user/hive/warehouse,/tmp/external");
conf.set("sentry.authorization-provider.cache-refresh-retry-wait.ms",
"5000");
conf.set("sentry.authorization-provider.cache-stale-threshold.ms",
"3000");
@@ -342,6 +346,20 @@ public class TestHDFSIntegration {
+ miniDFS.getFileSystem().getFileStatus(tmpPath).getGroup() + ", "
+ miniDFS.getFileSystem().getFileStatus(tmpPath).getPermission() +
", "
+ "\n\n");
+
+ int dfsSafeCheckRetry = 30;
+ boolean hasStarted = false;
+ for (int i = dfsSafeCheckRetry; i > 0; i--) {
+ if (!miniDFS.getFileSystem().isInSafeMode()) {
+ hasStarted = true;
+ System.out.println("HDFS safemode check num times : " + (31 - i));
+ break;
+ }
+ }
+ if (!hasStarted) {
+      throw new RuntimeException("HDFS hasn't exited safe mode yet.");
+ }
+
return null;
}
});
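One note on the retry loop added above: it checks safe mode up to 30 times but never sleeps between checks, so it effectively either passes on the first iteration or exhausts its retries immediately. A sketch of the same wait with a delay between attempts (the helper name and interval are assumptions, not part of this patch):

import java.io.IOException;
import org.apache.hadoop.hdfs.DistributedFileSystem;

final class SafeModeWait {
  // Poll the NameNode until it leaves safe mode, pausing between checks.
  static void awaitSafeModeExit(DistributedFileSystem dfs, int maxAttempts,
      long intervalMs) throws IOException, InterruptedException {
    for (int attempt = 1; attempt <= maxAttempts; attempt++) {
      if (!dfs.isInSafeMode()) {
        return;
      }
      Thread.sleep(intervalMs);
    }
    throw new IllegalStateException(
        "HDFS has not left safe mode after " + maxAttempts + " checks");
  }
}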
@@ -362,7 +380,9 @@ public class TestHDFSIntegration {
.put(ConfVars.HIVE_SERVER2_THRIFT_MIN_WORKER_THREADS.varname, "2");
properties.put("hive.metastore.uris", "thrift://localhost:" + hmsPort);
properties.put(ServerConfig.SECURITY_MODE,
ServerConfig.SECURITY_MODE_NONE);
+// properties.put("sentry.service.server.compact.transport", "true");
properties.put("sentry.hive.testing.mode", "true");
+ properties.put("sentry.service.reporting", "CONSOLE");
properties.put(ServerConfig.ADMIN_GROUPS, "hive,admin");
properties.put(ServerConfig.RPC_ADDRESS, "localhost");
properties.put(ServerConfig.RPC_PORT, String.valueOf(sentryPort < 0 ?
0 : sentryPort));
@@ -478,12 +498,7 @@ public class TestHDFSIntegration {
// Create new table and verify everything is fine after restart...
stmt.execute("create table p2 (s string) partitioned by (month int, day
int)");
- try {
- stmt.execute("alter table p2 add partition (month=1, day=1)");
- } catch (Exception e) {
- // Metastore throws and exception first time after sentry restart
- stmt.execute("alter table p2 add partition (month=1, day=1)");
- }
+ stmt.execute("alter table p2 add partition (month=1, day=1)");
stmt.execute("alter table p2 add partition (month=1, day=2)");
stmt.execute("alter table p2 add partition (month=2, day=1)");
stmt.execute("alter table p2 add partition (month=2, day=2)");
@@ -499,6 +514,73 @@ public class TestHDFSIntegration {
Thread.sleep(1000);
verifyOnAllSubDirs("/user/hive/warehouse/p2", FsAction.READ_EXECUTE,
"hbase", true);
+ // Create external table
+ writeToPath("/tmp/external/ext1", 5, "foo", "bar");
+
+ stmt.execute("create table ext1 (s string) location
\'/tmp/external/ext1\'");
+ ResultSet rs = stmt.executeQuery("select * from ext1");
+ int numRows = 0;
+ while (rs.next()) { numRows++; }
+ Assert.assertEquals(5, numRows);
+
+ // Ensure existing group permissions are never returned..
+ verifyOnAllSubDirs("/tmp/external/ext1", null, "bar", false);
+ verifyOnAllSubDirs("/tmp/external/ext1", null, "hbase", false);
+
+ stmt.execute("grant all on table ext1 to role p1_admin");
+ Thread.sleep(1000);
+ verifyOnAllSubDirs("/tmp/external/ext1", FsAction.ALL, "hbase", true);
+
+ stmt.execute("revoke select on table ext1 from role p1_admin");
+ Thread.sleep(1000);
+ verifyOnAllSubDirs("/tmp/external/ext1", FsAction.WRITE_EXECUTE, "hbase",
true);
+
+      // Verify database operations work correctly
+ stmt.execute("create database db1");
+ Thread.sleep(1000);
+ verifyOnAllSubDirs("/user/hive/warehouse/db1.db", null, "hbase", false);
+
+ stmt.execute("create table db1.tbl1 (s string)");
+ Thread.sleep(1000);
+ verifyOnAllSubDirs("/user/hive/warehouse/db1.db/tbl1", null, "hbase",
false);
+ stmt.execute("create table db1.tbl2 (s string)");
+ Thread.sleep(1000);
+ verifyOnAllSubDirs("/user/hive/warehouse/db1.db/tbl2", null, "hbase",
false);
+
+ // Verify db privileges are propagated to tables
+ stmt.execute("grant select on database db1 to role p1_admin");
+ Thread.sleep(1000);
+ verifyOnAllSubDirs("/user/hive/warehouse/db1.db/tbl1",
FsAction.READ_EXECUTE, "hbase", true);
+ verifyOnAllSubDirs("/user/hive/warehouse/db1.db/tbl2",
FsAction.READ_EXECUTE, "hbase", true);
+
+ stmt.execute("use db1");
+ stmt.execute("grant all on table tbl1 to role p1_admin");
+ Thread.sleep(1000);
+
+ verifyOnAllSubDirs("/user/hive/warehouse/db1.db/tbl1", FsAction.ALL,
"hbase", true);
+ verifyOnAllSubDirs("/user/hive/warehouse/db1.db/tbl2",
FsAction.READ_EXECUTE, "hbase", true);
+
+ // Verify recursive revoke
+ stmt.execute("revoke select on database db1 from role p1_admin");
+ Thread.sleep(1000);
+
+ verifyOnAllSubDirs("/user/hive/warehouse/db1.db/tbl1",
FsAction.WRITE_EXECUTE, "hbase", true);
+ verifyOnAllSubDirs("/user/hive/warehouse/db1.db/tbl2", null, "hbase",
false);
+
+ // Verify cleanup..
+ stmt.execute("drop table tbl1");
+ Thread.sleep(1000);
+ Assert.assertFalse(miniDFS.getFileSystem().exists(new
Path("/user/hive/warehouse/db1.db/tbl1")));
+
+ stmt.execute("drop table tbl2");
+ Thread.sleep(1000);
+ Assert.assertFalse(miniDFS.getFileSystem().exists(new
Path("/user/hive/warehouse/db1.db/tbl2")));
+
+ stmt.execute("use default");
+ stmt.execute("drop database db1");
+ Thread.sleep(1000);
+ Assert.assertFalse(miniDFS.getFileSystem().exists(new
Path("/user/hive/warehouse/db1.db")));
+
stmt.close();
conn.close();
}
@@ -527,6 +609,19 @@ public class TestHDFSIntegration {
rs.close();
}
+ private void writeToPath(String path, int numRows, String user, String
group) throws IOException {
+ Path p = new Path(path);
+ miniDFS.getFileSystem().mkdirs(p);
+ miniDFS.getFileSystem().setOwner(p, user, group);
+// miniDFS.getFileSystem().setPermission(p,
FsPermission.valueOf("-rwxrwx---"));
+ FSDataOutputStream f1 = miniDFS.getFileSystem().create(new Path(path +
"/stuff.txt"));
+ for (int i = 0; i < numRows; i++) {
+ f1.writeChars("random" + i + "\n");
+ }
+ f1.flush();
+ f1.close();
+ }
+
private void verifyHDFSandMR(Statement stmt) throws IOException,
InterruptedException, SQLException, Exception {
// hbase user should not be allowed to read...
@@ -548,8 +643,8 @@ public class TestHDFSIntegration {
// runWordCount(new JobConf(miniMR.getConfig()),
"/user/hive/warehouse/p1/month=1/day=1", "/tmp/wc_out");
stmt.execute("grant select on table p1 to role p1_admin");
-
Thread.sleep(1000);
+
verifyOnAllSubDirs("/user/hive/warehouse/p1", FsAction.READ_EXECUTE,
"hbase", true);
// hbase user should now be allowed to read...
hbaseUgi.doAs(new PrivilegedExceptionAction<Void>() {
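The verifyOnAllSubDirs calls throughout this test assert that a group's ACL entry on each directory matches (or is absent of) an expected FsAction. A self-contained sketch of that kind of single-path check, using the standard Hadoop ACL API (this is not the test's helper, just an illustration of the check it presumably performs):

import java.io.IOException;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.AclEntry;
import org.apache.hadoop.fs.permission.AclEntryType;
import org.apache.hadoop.fs.permission.FsAction;

final class AclCheck {
  // Return true if the path's ACL grants exactly the expected action to the group.
  static boolean groupHasAclAction(FileSystem fs, Path path, String group,
      FsAction expected) throws IOException {
    for (AclEntry entry : fs.getAclStatus(path).getEntries()) {
      if (entry.getType() == AclEntryType.GROUP
          && group.equals(entry.getName())
          && expected.equals(entry.getPermission())) {
        return true;
      }
    }
    return false;
  }
}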
http://git-wip-us.apache.org/repos/asf/incubator-sentry/blob/e9160ba8/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/metastore/AbstractMetastoreTestWithStaticConfiguration.java
----------------------------------------------------------------------
diff --git
a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/metastore/AbstractMetastoreTestWithStaticConfiguration.java
b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/metastore/AbstractMetastoreTestWithStaticConfiguration.java
index c759620..c66047c 100644
---
a/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/metastore/AbstractMetastoreTestWithStaticConfiguration.java
+++
b/sentry-tests/sentry-tests-hive/src/test/java/org/apache/sentry/tests/e2e/metastore/AbstractMetastoreTestWithStaticConfiguration.java
@@ -52,7 +52,6 @@ public abstract class
AbstractMetastoreTestWithStaticConfiguration extends
@BeforeClass
public static void setupTestStaticConfiguration() throws Exception {
useSentryService = true;
- setMetastoreListener = true;
testServerType = HiveServer2Type.InternalMetastore.name();
AbstractTestWithStaticConfiguration.setupTestStaticConfiguration();
}