Repository: ranger Updated Branches: refs/heads/master 54c87fec5 -> 44691b556
RANGER-1830:Write unit test for RANGER-1810 sqoop plugin Signed-off-by: peng.jianhua <[email protected]> Project: http://git-wip-us.apache.org/repos/asf/ranger/repo Commit: http://git-wip-us.apache.org/repos/asf/ranger/commit/44691b55 Tree: http://git-wip-us.apache.org/repos/asf/ranger/tree/44691b55 Diff: http://git-wip-us.apache.org/repos/asf/ranger/diff/44691b55 Branch: refs/heads/master Commit: 44691b556dfe427d0c873c4f6537f55fa207a28c Parents: 54c87fe Author: zhangqiang2 <[email protected]> Authored: Wed Nov 8 09:43:47 2017 +0800 Committer: peng.jianhua <[email protected]> Committed: Thu Nov 9 10:48:28 2017 +0800 ---------------------------------------------------------------------- plugin-sqoop/pom.xml | 14 + .../sqoop/authorizer/RangerAdminClientImpl.java | 92 ++ .../authorizer/RangerSqoopAuthorizerTest.java | 914 +++++++++++++++++++ .../src/test/resources/log4j.properties | 34 + .../test/resources/ranger-sqoop-security.xml | 60 ++ .../src/test/resources/sqoop-policies.json | 539 +++++++++++ .../src/test/resources/sqoop.properties | 68 ++ .../test/resources/sqoop_bootstrap.properties | 37 + 8 files changed, 1758 insertions(+) ---------------------------------------------------------------------- http://git-wip-us.apache.org/repos/asf/ranger/blob/44691b55/plugin-sqoop/pom.xml ---------------------------------------------------------------------- diff --git a/plugin-sqoop/pom.xml b/plugin-sqoop/pom.xml index 0c52e66..58d10a4 100644 --- a/plugin-sqoop/pom.xml +++ b/plugin-sqoop/pom.xml @@ -33,6 +33,20 @@ </parent> <dependencies> <dependency> + <groupId>junit</groupId> + <artifactId>junit</artifactId> + </dependency> + <dependency> + <groupId>org.mockito</groupId> + <artifactId>mockito-core</artifactId> + </dependency> + <dependency> + <groupId>org.apache.sqoop</groupId> + <artifactId>sqoop-security</artifactId> + <version>${sqoop.version}</version> + <scope>test</scope> + </dependency> + <dependency> <groupId>org.apache.sqoop</groupId> 
<artifactId>sqoop-core</artifactId> <version>${sqoop.version}</version> http://git-wip-us.apache.org/repos/asf/ranger/blob/44691b55/plugin-sqoop/src/test/java/org/apache/ranger/authorization/sqoop/authorizer/RangerAdminClientImpl.java ---------------------------------------------------------------------- diff --git a/plugin-sqoop/src/test/java/org/apache/ranger/authorization/sqoop/authorizer/RangerAdminClientImpl.java b/plugin-sqoop/src/test/java/org/apache/ranger/authorization/sqoop/authorizer/RangerAdminClientImpl.java new file mode 100644 index 0000000..689650b --- /dev/null +++ b/plugin-sqoop/src/test/java/org/apache/ranger/authorization/sqoop/authorizer/RangerAdminClientImpl.java @@ -0,0 +1,92 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ranger.authorization.sqoop.authorizer; + +import java.io.File; +import java.nio.file.FileSystems; +import java.nio.file.Files; +import java.util.List; + +import org.apache.ranger.admin.client.RangerAdminClient; +import org.apache.ranger.plugin.util.GrantRevokeRequest; +import org.apache.ranger.plugin.util.ServicePolicies; +import org.apache.ranger.plugin.util.ServiceTags; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import com.google.gson.Gson; +import com.google.gson.GsonBuilder; + +/** + * A test implementation of the RangerAdminClient interface that just reads + * policies in from a file and returns them + */ +public class RangerAdminClientImpl implements RangerAdminClient { + private static final Logger LOG = LoggerFactory.getLogger(RangerAdminClientImpl.class); + private static final String cacheFilename = "sqoop-policies.json"; + private Gson gson; + + @Override + public void init(String serviceName, String appId, String configPropertyPrefix) { + Gson gson = null; + try { + gson = new GsonBuilder().setDateFormat("yyyyMMdd-HH:mm:ss.SSS-Z").setPrettyPrinting().create(); + } catch (Throwable excp) { + LOG.error("RangerAdminClientImpl: failed to create GsonBuilder object", excp); + } + this.gson = gson; + } + + @Override + public ServicePolicies getServicePoliciesIfUpdated(long lastKnownVersion, long lastActivationTimeInMillis) + throws Exception { + + String basedir = System.getProperty("basedir"); + if (basedir == null) { + basedir = new File(".").getCanonicalPath(); + } + + java.nio.file.Path cachePath = FileSystems.getDefault() + .getPath(basedir, "/src/test/resources/" + cacheFilename); + byte[] cacheBytes = Files.readAllBytes(cachePath); + + return gson.fromJson(new String(cacheBytes), ServicePolicies.class); + } + + @Override + public void grantAccess(GrantRevokeRequest request) throws Exception { + + } + + @Override + public void revokeAccess(GrantRevokeRequest request) throws Exception { + + } + + @Override + 
public ServiceTags getServiceTagsIfUpdated(long lastKnownVersion, long lastActivationTimeInMillis) throws Exception { + return null; + + } + + @Override + public List<String> getTagTypes(String tagTypePattern) throws Exception { + return null; + } + +} \ No newline at end of file http://git-wip-us.apache.org/repos/asf/ranger/blob/44691b55/plugin-sqoop/src/test/java/org/apache/ranger/authorization/sqoop/authorizer/RangerSqoopAuthorizerTest.java ---------------------------------------------------------------------- diff --git a/plugin-sqoop/src/test/java/org/apache/ranger/authorization/sqoop/authorizer/RangerSqoopAuthorizerTest.java b/plugin-sqoop/src/test/java/org/apache/ranger/authorization/sqoop/authorizer/RangerSqoopAuthorizerTest.java new file mode 100644 index 0000000..7e0e562 --- /dev/null +++ b/plugin-sqoop/src/test/java/org/apache/ranger/authorization/sqoop/authorizer/RangerSqoopAuthorizerTest.java @@ -0,0 +1,914 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.ranger.authorization.sqoop.authorizer; + +import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +import java.io.File; +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; + +import org.apache.commons.lang.RandomStringUtils; +import org.apache.sqoop.common.SqoopException; +import org.apache.sqoop.core.ConfigurationConstants; +import org.apache.sqoop.core.SqoopConfiguration; +import org.apache.sqoop.model.MJob; +import org.apache.sqoop.model.MLink; +import org.apache.sqoop.repository.Repository; +import org.apache.sqoop.repository.RepositoryManager; +import org.apache.sqoop.security.AuthorizationManager; +import org.apache.sqoop.security.authorization.AuthorizationEngine; +import org.junit.AfterClass; +import org.junit.BeforeClass; +import org.junit.FixMethodOrder; +import org.junit.Test; +import org.junit.runners.MethodSorters; + +/** +* +* Here we plug the Ranger RangerSqoopAuthorizer into Sqoop. +* +* A custom RangerAdminClient is plugged into Ranger in turn, which loads security policies from a local file. 
+* These policies were generated in the Ranger Admin UI for a sqoop service called "sqoopTest": +* +* a) A user "sqoop" can do all permissions(contains the "read" and "write") on all connectors, any link and any job; +* b) A user "zhangqiang" can do a "read" on the connector "kafka-connector", +* and "zhangqiang" is the creator of any job and any link by mock; +* c) A user "yuwen" can do "read" and "write" on the connector "oracle-jdbc-connector" and "hdfs-connector"; +* d) A user "yuwen" can do "read" and "write" on the link "oracle-link" and "hdfs-link"; +* e) A user "yuwen" can do "read" and "write" on the job "oracle2hdfs-job"; +* +*/ +@FixMethodOrder(MethodSorters.NAME_ASCENDING) +public class RangerSqoopAuthorizerTest { + private static final List<String> allConnectors = new ArrayList<>(); + + private static final String SQOOP = "sqoop"; + + private static final String ZHANGQIANG = "zhangqiang"; + + private static final String YUWEN = "yuwen"; + + private static final String ORACLE_JDBC_CONNECTOR = "oracle-jdbc-connector"; + + private static final String SFTP_CONNECTOR = "sftp-connector"; + + private static final String KAFKA_CONNECTOR = "kafka-connector"; + + private static final String KITE_CONNECTOR = "kite-connector"; + + private static final String FTP_CONNECTOR = "ftp-connector"; + + private static final String HDFS_CONNECTOR = "hdfs-connector"; + + private static final String GENERIC_JDBC_CONNECTOR = "generic-jdbc-connector"; + + private static final String ORACLE_LINK = "oracle-link"; + + private static final String HDFS_LINK = "hdfs-link"; + + private static final String ORACLE2HDFS_JOB = "oracle2hdfs-job"; + + @BeforeClass + public static void setup() throws Exception { + // init sqoop all connectors + addAllConnectors(); + + // init sqoop to enable ranger authentication + initSqoopAuth(); + } + + @AfterClass + public static void cleanup() throws Exception { + // do nothing + } + + /** + * Help function: init sqoop all connectors + */ + private 
static void addAllConnectors() { + allConnectors.add(ORACLE_JDBC_CONNECTOR); + allConnectors.add(SFTP_CONNECTOR); + allConnectors.add(KAFKA_CONNECTOR); + allConnectors.add(KITE_CONNECTOR); + allConnectors.add(FTP_CONNECTOR); + allConnectors.add(HDFS_CONNECTOR); + allConnectors.add(GENERIC_JDBC_CONNECTOR); + } + + /** + * Help function: init sqoop to enable ranger authentication + */ + private static void initSqoopAuth() throws IOException, ClassNotFoundException, IllegalAccessException, + InstantiationException { + // init sqoop configruation + String basedir = System.getProperty("basedir"); + if (basedir == null) { + basedir = new File(".").getCanonicalPath(); + } + String sqoopConfigDirPath = basedir + "/src/test/resources/"; + System.setProperty(ConfigurationConstants.SYSPROP_CONFIG_DIR, sqoopConfigDirPath); + SqoopConfiguration.getInstance().initialize(); + + // init sqoop authorization + AuthorizationManager.getInstance().initialize(); + + // mock sqoop class for authentication + RepositoryManager repositoryManager = mock(RepositoryManager.class); + RepositoryManager.setInstance(repositoryManager); + Repository repository = mock(Repository.class); + when(repositoryManager.getRepository()).thenReturn(repository); + + MLink link = mock(MLink.class); + when(repository.findLink(anyString())).thenReturn(link); + MJob job = mock(MJob.class); + when(repository.findJob(anyString())).thenReturn(job); + + // mock user "zhangqiang" as the creator of any link and any job + when(link.getCreationUser()).thenReturn(ZHANGQIANG); + when(job.getCreationUser()).thenReturn(ZHANGQIANG); + } + + /** + * Help function: get random link name + */ + private String getRandomLinkName() { + return RandomStringUtils.randomAlphanumeric(10) + "-link"; + } + + /** + * Help function: get random job name + */ + private String getRandomJobName() { + return RandomStringUtils.randomAlphanumeric(10) + "-job"; + } + + // No.1 readConnector test start + /** + * sqoop read all connectors success + */ 
+ @Test + public void readConnectorAllWithAllPermissions() { + String user = SQOOP; + for (String connector : allConnectors) { + AuthorizationEngine.readConnector(user, connector); + } + } + + /** + * zhangqiang read kafka-connector success + */ + @Test + public void readConnectorKafkaWithReadPermission() { + String user = ZHANGQIANG; + String connector = KAFKA_CONNECTOR; + AuthorizationEngine.readConnector(user, connector); + } + + /** + * zhangqiang read hdfs-connector failed + */ + @Test(expected = SqoopException.class) + public void readConnectorHdfsWithoutPermission() { + String user = ZHANGQIANG; + String connector = HDFS_CONNECTOR; + AuthorizationEngine.readConnector(user, connector); + } + + /** + * yuwen read oracle-jdbc-connector success + */ + @Test + public void readConnectorOracleJdbcWithAllPermissions() { + String user = YUWEN; + String connector = ORACLE_JDBC_CONNECTOR; + AuthorizationEngine.readConnector(user, connector); + } + + /** + * yuwen read hdfs-connector success + */ + @Test + public void readConnectorHdfsWithAllPermissions() { + String user = YUWEN; + String connector = HDFS_CONNECTOR; + AuthorizationEngine.readConnector(user, connector); + } + + /** + * yuwen read kafka-connector failed + */ + @Test(expected = SqoopException.class) + public void readConnectorKafkaWithoutPermission() { + String user = YUWEN; + String connector = KAFKA_CONNECTOR; + AuthorizationEngine.readConnector(user, connector); + } + + // No.1 readConnector test end + + // No.2 readLink test start + /** + * sqoop read any link success + */ + @Test + public void readLinkAnyWithAllPermissions() { + String user = SQOOP; + String link = getRandomLinkName(); + AuthorizationEngine.readLink(user, link); + } + + /** + * zhangqiang read any link success + */ + @Test + public void readLinkAnyAsCreater() { + String user = ZHANGQIANG; + String link = getRandomLinkName(); + AuthorizationEngine.readLink(user, link); + } + + /** + * yuwen read oracle-link success + */ + @Test + 
public void readLinkOracleWithReadPermission() { + String user = YUWEN; + String link = ORACLE_LINK; + AuthorizationEngine.readLink(user, link); + } + + /** + * yuwen read hdfs-link success + */ + @Test + public void readLinkHdfsWithReadPermission() { + String user = YUWEN; + String link = HDFS_LINK; + AuthorizationEngine.readLink(user, link); + } + + /** + * yuwen read any link failed + */ + @Test(expected = SqoopException.class) + public void readLinkAnyWithoutPermission() { + String user = YUWEN; + String link = getRandomLinkName(); + AuthorizationEngine.readLink(user, link); + } + + // No.2 readLink test end + + // No.3 createLink test start + /** + * sqoop create link by all connectors success + */ + @Test + public void createLinkByAllConnectorsWithAllPermissions() { + String user = SQOOP; + for (String connector : allConnectors) { + AuthorizationEngine.createLink(user, connector); + } + } + + /** + * zhangqiang create link by kafka-connector success + */ + @Test + public void createLinkByKafkaConnectorWithReadPermission() { + String user = ZHANGQIANG; + String connector = KAFKA_CONNECTOR; + AuthorizationEngine.createLink(user, connector); + } + + /** + * zhangqiang create link by hdfs-connector failed + */ + @Test(expected = SqoopException.class) + public void createLinkByHdfsConnectorWithoutPermission() { + String user = ZHANGQIANG; + String connector = HDFS_CONNECTOR; + AuthorizationEngine.createLink(user, connector); + } + + /** + * yuwen create link by oracle-jdbc-connector success + */ + @Test + public void createLinkByOracleJdbcConnectorWithReadPermission() { + String user = YUWEN; + String connector = ORACLE_JDBC_CONNECTOR; + AuthorizationEngine.createLink(user, connector); + } + + /** + * yuwen create link by hdfs-connector success + */ + @Test + public void createLinkByHdfsConnectorWithReadPermission() { + String user = YUWEN; + String connector = HDFS_CONNECTOR; + AuthorizationEngine.createLink(user, connector); + } + + /** + * yuwen create link by 
kafka-connector failed + */ + @Test(expected = SqoopException.class) + public void createLinkByKafkaConnectorWithoutPermission() { + String user = YUWEN; + String connector = KAFKA_CONNECTOR; + AuthorizationEngine.createLink(user, connector); + } + + // No.3 createLink test end + + // No.4 updateLink test start + /** + * sqoop update any link created by all connectors success + */ + @Test + public void updateLinkAnyByAllConnectorsWithAllPermissions() { + String user = SQOOP; + for (String connector : allConnectors) { + String link = getRandomLinkName(); + AuthorizationEngine.updateLink(user, connector, link); + } + } + + /** + * zhangqiang update any link created by kafka-connector success + */ + @Test + public void updateLinkAnyByKafkaConnectorAsCreater() { + String user = ZHANGQIANG; + String connector = KAFKA_CONNECTOR; + String link = getRandomLinkName(); + AuthorizationEngine.updateLink(user, connector, link); + } + + /** + * zhangqiang update any link created by hdfs-connector failed + */ + @Test(expected = SqoopException.class) + public void updateLinkAnyByHdfsConnectorWithoutPermission() { + String user = ZHANGQIANG; + String connector = HDFS_CONNECTOR; + String link = getRandomLinkName(); + AuthorizationEngine.updateLink(user, connector, link); + } + + /** + * yuwen update link created by oracle-jdbc-connector success + */ + @Test + public void updateLinkByOracleJdbcConnectorWithWritePermission() { + String user = YUWEN; + String connector = ORACLE_JDBC_CONNECTOR; + String link = ORACLE_LINK; + AuthorizationEngine.updateLink(user, connector, link); + } + + /** + * yuwen update link created by hdfs-connector success + */ + @Test + public void updateLinkByHdfsConnectorWithReadPermission() { + String user = YUWEN; + String connector = HDFS_CONNECTOR; + String link = HDFS_LINK; + AuthorizationEngine.updateLink(user, connector, link); + } + + /** + * yuwen update link created by kafka-connector failed + */ + @Test(expected = SqoopException.class) + public void 
updateLinkByKafkaConnectorWithoutPermission() { + String user = YUWEN; + String connector = KAFKA_CONNECTOR; + String link = getRandomLinkName(); + AuthorizationEngine.updateLink(user, connector, link); + } + + // No.4 updateLink test end + + // No.5 deleteLink test start + /** + * sqoop delete any link success + */ + @Test + public void deleteLinkAnyWithAllPermissions() { + String user = SQOOP; + String link = getRandomLinkName(); + AuthorizationEngine.deleteLink(user, link); + } + + /** + * zhangqiang delete any link success + */ + @Test + public void deleteLinkAnyAsCreater() { + String user = ZHANGQIANG; + String link = getRandomLinkName(); + AuthorizationEngine.deleteLink(user, link); + } + + /** + * yuwen delete oracle-link success + */ + @Test + public void deleteLinkOracleWithWritePermission() { + String user = YUWEN; + String link = ORACLE_LINK; + AuthorizationEngine.deleteLink(user, link); + } + + /** + * yuwen delete hdfs-link success + */ + @Test + public void deleteLinkHdfsWithWritePermission() { + String user = YUWEN; + String link = HDFS_LINK; + AuthorizationEngine.deleteLink(user, link); + } + + /** + * yuwen delete any link failed + */ + @Test(expected = SqoopException.class) + public void deleteLinkAnyWithoutPermission() { + String user = YUWEN; + String link = getRandomLinkName(); + AuthorizationEngine.deleteLink(user, link); + } + + // No.5 deleteLink test end + + // No.6 enableDisableLink test start + /** + * sqoop enable disable any link success + */ + @Test + public void enableDisableLinkAnyWithAllPermissions() { + String user = SQOOP; + String link = getRandomLinkName(); + AuthorizationEngine.enableDisableLink(user, link); + } + + /** + * zhangqiang enable disable any link success + */ + @Test + public void enableDisableLinkAnyAsCreater() { + String user = ZHANGQIANG; + String link = getRandomLinkName(); + AuthorizationEngine.enableDisableLink(user, link); + } + + /** + * yuwen enable disable oracle-link success + */ + @Test + public void 
enableDisableLinkOracleWithWritePermission() { + String user = YUWEN; + String link = ORACLE_LINK; + AuthorizationEngine.enableDisableLink(user, link); + } + + /** + * yuwen enable disable hdfs-link success + */ + @Test + public void enableDisableLinkHdfsWithWritePermission() { + String user = YUWEN; + String link = HDFS_LINK; + AuthorizationEngine.enableDisableLink(user, link); + } + + /** + * yuwen enable disable any link failed + */ + @Test(expected = SqoopException.class) + public void enableDisableLinkAnyWithoutPermission() { + String user = YUWEN; + String link = getRandomLinkName(); + AuthorizationEngine.enableDisableLink(user, link); + } + + // No.6 enableDisableLink test end + + // No.7 readJob test start + /** + * sqoop read any job success + */ + @Test + public void readJobAnyWithAllPermissions() { + String user = SQOOP; + String job = getRandomJobName(); + AuthorizationEngine.readJob(user, job); + } + + /** + * zhangqiang read any job success + */ + @Test + public void readJobAnyAsCreater() { + String user = ZHANGQIANG; + String job = getRandomJobName(); + AuthorizationEngine.readJob(user, job); + } + + /** + * yuwen read oracle2hdfs-job success + */ + @Test + public void readJobOracle2HdfsWithReadPermission() { + String user = YUWEN; + String job = ORACLE2HDFS_JOB; + AuthorizationEngine.readJob(user, job); + } + + /** + * yuwen read any job failed + */ + @Test(expected = SqoopException.class) + public void readJobAnyWithoutPermission() { + String user = YUWEN; + String job = getRandomJobName(); + AuthorizationEngine.readJob(user, job); + } + + // No.7 readJob test end + + // No.8 createJob test start + /** + * sqoop create job by any two links success + */ + @Test + public void createJobByAnyTwoLinksWithAllPermissions() { + String user = SQOOP; + String link1 = getRandomLinkName(); + String link2 = getRandomLinkName(); + AuthorizationEngine.createJob(user, link1, link2); + } + + /** + * zhangqiang create job by any two links success + */ + @Test + 
public void createJobByAnyTwoLinksAsCreater() { + String user = ZHANGQIANG; + String link1 = getRandomLinkName(); + String link2 = getRandomLinkName(); + AuthorizationEngine.createJob(user, link1, link2); + } + + /** + * yuwen create job from oracle-link to hdfs-link success + */ + @Test + public void createJobFromOracle2HdfsLinkWithReadPermission() { + String user = YUWEN; + String link1 = ORACLE_LINK; + String link2 = HDFS_LINK; + AuthorizationEngine.createJob(user, link1, link2); + } + + /** + * yuwen create job from oracle-link to any link failed + */ + @Test(expected = SqoopException.class) + public void createJobFromOracle2AnyLinkWithoutPermission() { + String user = YUWEN; + String link1 = ORACLE_LINK; + String link2 = getRandomLinkName(); + AuthorizationEngine.createJob(user, link1, link2); + } + + /** + * yuwen create job by any two links failed + */ + @Test(expected = SqoopException.class) + public void createJobByAnyTwoLinksWithoutPermission() { + String user = YUWEN; + String link1 = getRandomLinkName(); + String link2 = getRandomLinkName(); + AuthorizationEngine.createJob(user, link1, link2); + } + + // No.8 createJob test end + + // No.9 updateJob test start + /** + * sqoop update any job created by any two links success + */ + @Test + public void updateJobAnyByAnyTwoLinksWithAllPermissions() { + String user = SQOOP; + String link1 = getRandomLinkName(); + String link2 = getRandomLinkName(); + String job = getRandomJobName(); + AuthorizationEngine.updateJob(user, link1, link2, job); + } + + /** + * zhangqiang update any job created by any two links success + */ + @Test + public void updateJobAnyByAnyTwoLinksAsCreater() { + String user = ZHANGQIANG; + String link1 = getRandomLinkName(); + String link2 = getRandomLinkName(); + String job = getRandomJobName(); + AuthorizationEngine.updateJob(user, link1, link2, job); + } + + /** + * yuwen update oracle2hdfs-job created from oracle-link to hdfs-link + * success + */ + @Test + public void 
updateJobOracle2HdfsByTwoLinksWithWritePermission() { + String user = YUWEN; + String link1 = ORACLE_LINK; + String link2 = HDFS_LINK; + String job = ORACLE2HDFS_JOB; + AuthorizationEngine.updateJob(user, link1, link2, job); + } + + /** + * yuwen update oracle2hdfs-job created from new_oracle-link to hdfs-link + * failed + */ + @Test(expected = SqoopException.class) + public void updateJobOracle2HdfsByTwoLinksWithoutPermission() { + String user = YUWEN; + String link1 = "new_" + ORACLE_LINK; + String link2 = HDFS_LINK; + String job = ORACLE2HDFS_JOB; + AuthorizationEngine.updateJob(user, link1, link2, job); + } + + /** + * yuwen update any job created from oracle-link to hdfs-link failed + */ + @Test(expected = SqoopException.class) + public void updateJobAnyByTwoLinksWithoutPermission() { + String user = YUWEN; + String link1 = ORACLE_LINK; + String link2 = HDFS_LINK; + String job = getRandomJobName(); + AuthorizationEngine.updateJob(user, link1, link2, job); + } + + /** + * yuwen update any job created from oracle-link to hdfs-link failed + */ + @Test(expected = SqoopException.class) + public void updateJobAnyByAnyLinksWithoutPermission() { + String user = YUWEN; + String link1 = getRandomLinkName(); + String link2 = getRandomLinkName(); + String job = getRandomJobName(); + AuthorizationEngine.updateJob(user, link1, link2, job); + } + + // No.9 updateJob test end + + // No.10 deleteJob test start + /** + * sqoop delete any job success + */ + @Test + public void deleteJobAnyWithAllPermissions() { + String user = SQOOP; + String job = getRandomJobName(); + AuthorizationEngine.deleteJob(user, job); + } + + /** + * zhangqiang delete any job success + */ + @Test + public void deleteJobAnyAsCreater() { + String user = ZHANGQIANG; + String job = getRandomJobName(); + AuthorizationEngine.deleteJob(user, job); + } + + /** + * yuwen delete oracle2hdfs-job success + */ + @Test + public void deleteJobOracle2HdfsWithWritePermission() { + String user = YUWEN; + String job = 
ORACLE2HDFS_JOB; + AuthorizationEngine.deleteJob(user, job); + } + + /** + * yuwen delete any job failed + */ + @Test(expected = SqoopException.class) + public void deleteJobAnyWithoutPermission() { + String user = YUWEN; + String job = getRandomJobName(); + AuthorizationEngine.deleteJob(user, job); + } + + // No.10 deleteJob test end + + // No.11 enableDisableJob test start + /** + * sqoop enable disable any job success + */ + @Test + public void enableDisableJobAnyWithAllPermissions() { + String user = SQOOP; + String job = getRandomJobName(); + AuthorizationEngine.enableDisableJob(user, job); + } + + /** + * zhangqiang enable disable any job success + */ + @Test + public void enableDisableJobAnyAsCreater() { + String user = ZHANGQIANG; + String job = getRandomJobName(); + AuthorizationEngine.enableDisableJob(user, job); + } + + /** + * yuwen enable disable oracle2hdfs-job success + */ + @Test + public void enableDisableJobOracle2HdfsWithWritePermission() { + String user = YUWEN; + String job = ORACLE2HDFS_JOB; + AuthorizationEngine.enableDisableJob(user, job); + } + + /** + * yuwen enable disable any job failed + */ + @Test(expected = SqoopException.class) + public void enableDisableJobAnyWithoutPermission() { + String user = YUWEN; + String job = getRandomJobName(); + AuthorizationEngine.enableDisableJob(user, job); + } + + // No.11 enableDisableJob test end + + // No.12 startJob test start + /** + * sqoop start any job success + */ + @Test + public void startJobAnyWithAllPermissions() { + String user = SQOOP; + String job = getRandomJobName(); + AuthorizationEngine.startJob(user, job); + } + + /** + * zhangqiang start any job success + */ + @Test + public void startJobAnyAsCreater() { + String user = ZHANGQIANG; + String job = getRandomJobName(); + AuthorizationEngine.startJob(user, job); + } + + /** + * yuwen start oracle2hdfs-job success + */ + @Test + public void startJobOracle2HdfsWithWritePermission() { + String user = YUWEN; + String job = 
ORACLE2HDFS_JOB; + AuthorizationEngine.startJob(user, job); + } + + /** + * yuwen start any job failed + */ + @Test(expected = SqoopException.class) + public void startJobAnyWithoutPermission() { + String user = YUWEN; + String job = getRandomJobName(); + AuthorizationEngine.startJob(user, job); + } + + // No.12 startJob test end + + // No.13 stopJob test start + /** + * sqoop stop any job success + */ + @Test + public void stopJobAnyWithAllPermissions() { + String user = SQOOP; + String job = getRandomJobName(); + AuthorizationEngine.stopJob(user, job); + } + + /** + * zhangqiang start any job success + */ + @Test + public void stopJobAnyAsCreater() { + String user = ZHANGQIANG; + String job = getRandomJobName(); + AuthorizationEngine.stopJob(user, job); + } + + /** + * yuwen stop oracle2hdfs-job success + */ + @Test + public void stopJobOracle2HdfsWithWritePermission() { + String user = YUWEN; + String job = ORACLE2HDFS_JOB; + AuthorizationEngine.stopJob(user, job); + } + + /** + * yuwen stop any job failed + */ + @Test(expected = SqoopException.class) + public void stopJobAnyWithoutPermission() { + String user = YUWEN; + String job = getRandomJobName(); + AuthorizationEngine.stopJob(user, job); + } + + // No.13 stopJob test end + + // No.14 statusJob test start + /** + * sqoop status any job success + */ + @Test + public void statusJobAnyWithAllPermissions() { + String user = SQOOP; + String job = getRandomJobName(); + AuthorizationEngine.statusJob(user, job); + } + + /** + * zhangqiang status any job success + */ + @Test + public void statusJobAnyAsCreater() { + String user = ZHANGQIANG; + String job = getRandomJobName(); + AuthorizationEngine.statusJob(user, job); + } + + /** + * yuwen status oracle2hdfs-job success + */ + @Test + public void statusJobOracle2HdfsWithReadPermission() { + String user = YUWEN; + String job = ORACLE2HDFS_JOB; + AuthorizationEngine.statusJob(user, job); + } + + /** + * yuwen status status job failed + */ + @Test(expected = 
SqoopException.class) + public void statusJobAnyWithoutPermission() { + String user = YUWEN; + String job = getRandomJobName(); + AuthorizationEngine.statusJob(user, job); + } + + // No.14 statusJob test end + +} http://git-wip-us.apache.org/repos/asf/ranger/blob/44691b55/plugin-sqoop/src/test/resources/log4j.properties ---------------------------------------------------------------------- diff --git a/plugin-sqoop/src/test/resources/log4j.properties b/plugin-sqoop/src/test/resources/log4j.properties new file mode 100644 index 0000000..f7ab2ba --- /dev/null +++ b/plugin-sqoop/src/test/resources/log4j.properties @@ -0,0 +1,34 @@ +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +##-- To prevent junits from cluttering the build run by default all test runs send output to null appender +log4j.appender.devnull=org.apache.log4j.varia.NullAppender +ranger.root.logger=FATAL,devnull + +##-- uncomment the following line during during development/debugging so see debug messages during test run to be emitted to console +# ranger.root.logger=DEBUG,console +log4j.rootLogger=${ranger.root.logger} + +# Logging Threshold +log4j.threshold=ALL + +# +# console +# Add "console" to rootlogger above if you want to use this +# +log4j.appender.console=org.apache.log4j.ConsoleAppender +log4j.appender.console.target=System.err +log4j.appender.console.layout=org.apache.log4j.PatternLayout +log4j.appender.console.layout.ConversionPattern=%d{ISO8601} %-5p [%t] %c{2}: %m%n http://git-wip-us.apache.org/repos/asf/ranger/blob/44691b55/plugin-sqoop/src/test/resources/ranger-sqoop-security.xml ---------------------------------------------------------------------- diff --git a/plugin-sqoop/src/test/resources/ranger-sqoop-security.xml b/plugin-sqoop/src/test/resources/ranger-sqoop-security.xml new file mode 100644 index 0000000..24b3155 --- /dev/null +++ b/plugin-sqoop/src/test/resources/ranger-sqoop-security.xml @@ -0,0 +1,60 @@ +<?xml version="1.0"?> +<!-- + Licensed to the Apache Software Foundation (ASF) under one or more + contributor license agreements. See the NOTICE file distributed with + this work for additional information regarding copyright ownership. + The ASF licenses this file to You under the Apache License, Version 2.0 + (the "License"); you may not use this file except in compliance with + the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ See the License for the specific language governing permissions and + limitations under the License. +--> +<?xml-stylesheet type="text/xsl" href="configuration.xsl"?> +<configuration xmlns:xi="http://www.w3.org/2001/XInclude"> + <property> + <name>ranger.plugin.sqoop.policy.rest.url</name> + <value>http://localhost:6080</value> + <description> + URL to Ranger Admin + </description> + </property> + + <property> + <name>ranger.plugin.sqoop.service.name</name> + <value>sqoopTest</value> + <description> + Name of the Ranger service containing policies for this SampleApp instance + </description> + </property> + + <property> + <name>ranger.plugin.sqoop.policy.source.impl</name> + <value>org.apache.ranger.authorization.sqoop.authorizer.RangerAdminClientImpl</value> + <description> + Policy source. + </description> + </property> + + <property> + <name>ranger.plugin.sqoop.policy.pollIntervalMs</name> + <value>30000</value> + <description> + How often to poll for changes in policies? + </description> + </property> + + <property> + <name>ranger.plugin.sqoop.policy.cache.dir</name> + <value>./target</value> + <description> + Directory where Ranger policies are cached after successful retrieval from the source + </description> + </property> + +</configuration> http://git-wip-us.apache.org/repos/asf/ranger/blob/44691b55/plugin-sqoop/src/test/resources/sqoop-policies.json ---------------------------------------------------------------------- diff --git a/plugin-sqoop/src/test/resources/sqoop-policies.json b/plugin-sqoop/src/test/resources/sqoop-policies.json new file mode 100644 index 0000000..e622578 --- /dev/null +++ b/plugin-sqoop/src/test/resources/sqoop-policies.json @@ -0,0 +1,539 @@ +{ + "serviceName": "sqoopTest", + "serviceId": 2, + "policyVersion": 13, + "policyUpdateTime": "20171103-17:19:53.000-+0800", + "policies": [ + { + "service": "sqoopTest", + "name": "all - link", + "policyType": 0, + "description": "Policy for all - link", + "isAuditEnabled": true, + 
"resources": { + "link": { + "values": [ + "*" + ], + "isExcludes": false, + "isRecursive": false + } + }, + "policyItems": [ + { + "accesses": [ + { + "type": "READ", + "isAllowed": true + }, + { + "type": "WRITE", + "isAllowed": true + } + ], + "users": [ + "sqoop" + ], + "groups": [], + "conditions": [], + "delegateAdmin": true + } + ], + "denyPolicyItems": [], + "allowExceptions": [], + "denyExceptions": [], + "dataMaskPolicyItems": [], + "rowFilterPolicyItems": [], + "id": 16, + "guid": "2e493ce4-6da3-45c2-a34f-72334834c711", + "isEnabled": true, + "version": 1 + }, + { + "service": "sqoopTest", + "name": "all - job", + "policyType": 0, + "description": "Policy for all - job", + "isAuditEnabled": true, + "resources": { + "job": { + "values": [ + "*" + ], + "isExcludes": false, + "isRecursive": false + } + }, + "policyItems": [ + { + "accesses": [ + { + "type": "READ", + "isAllowed": true + }, + { + "type": "WRITE", + "isAllowed": true + } + ], + "users": [ + "sqoop" + ], + "groups": [], + "conditions": [], + "delegateAdmin": true + } + ], + "denyPolicyItems": [], + "allowExceptions": [], + "denyExceptions": [], + "dataMaskPolicyItems": [], + "rowFilterPolicyItems": [], + "id": 17, + "guid": "9f328169-0801-4733-9d0f-d2fe28442010", + "isEnabled": true, + "version": 1 + }, + { + "service": "sqoopTest", + "name": "all - connector", + "policyType": 0, + "description": "Policy for all - connector", + "isAuditEnabled": true, + "resources": { + "connector": { + "values": [ + "*" + ], + "isExcludes": false, + "isRecursive": false + } + }, + "policyItems": [ + { + "accesses": [ + { + "type": "READ", + "isAllowed": true + }, + { + "type": "WRITE", + "isAllowed": true + } + ], + "users": [ + "sqoop" + ], + "groups": [], + "conditions": [], + "delegateAdmin": true + } + ], + "denyPolicyItems": [], + "allowExceptions": [], + "denyExceptions": [], + "dataMaskPolicyItems": [], + "rowFilterPolicyItems": [], + "id": 18, + "guid": "d715946d-cd6f-4e04-9203-5a7fc65b9726", + 
"isEnabled": true, + "version": 1 + }, + { + "service": "sqoopTest", + "name": "accessKafkaConnector", + "policyType": 0, + "description": "", + "isAuditEnabled": true, + "resources": { + "connector": { + "values": [ + "kafka-connector" + ], + "isExcludes": false, + "isRecursive": false + } + }, + "policyItems": [ + { + "accesses": [ + { + "type": "READ", + "isAllowed": true + } + ], + "users": [ + "zhangqiang" + ], + "groups": [], + "conditions": [], + "delegateAdmin": false + } + ], + "denyPolicyItems": [], + "allowExceptions": [], + "denyExceptions": [], + "dataMaskPolicyItems": [], + "rowFilterPolicyItems": [], + "id": 19, + "guid": "a2b8f85f-27c7-4ba1-ba8c-6213c6172636", + "isEnabled": true, + "version": 3 + }, + { + "service": "sqoopTest", + "name": "accessOracleJdbcConnector", + "policyType": 0, + "description": "", + "isAuditEnabled": true, + "resources": { + "connector": { + "values": [ + "oracle-jdbc-connector" + ], + "isExcludes": false, + "isRecursive": false + } + }, + "policyItems": [ + { + "accesses": [ + { + "type": "READ", + "isAllowed": true + }, + { + "type": "WRITE", + "isAllowed": true + } + ], + "users": [ + "yuwen" + ], + "groups": [], + "conditions": [], + "delegateAdmin": false + } + ], + "denyPolicyItems": [], + "allowExceptions": [], + "denyExceptions": [], + "dataMaskPolicyItems": [], + "rowFilterPolicyItems": [], + "id": 22, + "guid": "91f287f1-ce44-435b-bd8b-b0285a20f9c5", + "isEnabled": true, + "version": 1 + }, + { + "service": "sqoopTest", + "name": "accessHdfsConnector", + "policyType": 0, + "description": "", + "isAuditEnabled": true, + "resources": { + "connector": { + "values": [ + "hdfs-connector" + ], + "isExcludes": false, + "isRecursive": false + } + }, + "policyItems": [ + { + "accesses": [ + { + "type": "READ", + "isAllowed": true + }, + { + "type": "WRITE", + "isAllowed": true + } + ], + "users": [ + "yuwen" + ], + "groups": [], + "conditions": [], + "delegateAdmin": false + } + ], + "denyPolicyItems": [], + 
"allowExceptions": [], + "denyExceptions": [], + "dataMaskPolicyItems": [], + "rowFilterPolicyItems": [], + "id": 23, + "guid": "454bc74e-e012-4ebd-a4bf-375ed8b2d98d", + "isEnabled": true, + "version": 1 + }, + { + "service": "sqoopTest", + "name": "accessOracleLink", + "policyType": 0, + "description": "", + "isAuditEnabled": true, + "resources": { + "link": { + "values": [ + "oracle-link" + ], + "isExcludes": false, + "isRecursive": false + } + }, + "policyItems": [ + { + "accesses": [ + { + "type": "READ", + "isAllowed": true + }, + { + "type": "WRITE", + "isAllowed": true + } + ], + "users": [ + "yuwen" + ], + "groups": [], + "conditions": [], + "delegateAdmin": false + } + ], + "denyPolicyItems": [], + "allowExceptions": [], + "denyExceptions": [], + "dataMaskPolicyItems": [], + "rowFilterPolicyItems": [], + "id": 24, + "guid": "3154e0c9-ccd2-4566-a730-5ea51ab21404", + "isEnabled": true, + "version": 1 + }, + { + "service": "sqoopTest", + "name": "accessHdfsLink", + "policyType": 0, + "description": "", + "isAuditEnabled": true, + "resources": { + "link": { + "values": [ + "hdfs-link" + ], + "isExcludes": false, + "isRecursive": false + } + }, + "policyItems": [ + { + "accesses": [ + { + "type": "READ", + "isAllowed": true + }, + { + "type": "WRITE", + "isAllowed": true + } + ], + "users": [ + "yuwen" + ], + "groups": [], + "conditions": [], + "delegateAdmin": false + } + ], + "denyPolicyItems": [], + "allowExceptions": [], + "denyExceptions": [], + "dataMaskPolicyItems": [], + "rowFilterPolicyItems": [], + "id": 25, + "guid": "2e11dd2d-4da4-4dc4-89fa-ce8e4f5fbc1c", + "isEnabled": true, + "version": 2 + }, + { + "service": "sqoopTest", + "name": "accessOracle2HdfsJob", + "policyType": 0, + "description": "", + "isAuditEnabled": true, + "resources": { + "job": { + "values": [ + "oracle2hdfs-job" + ], + "isExcludes": false, + "isRecursive": false + } + }, + "policyItems": [ + { + "accesses": [ + { + "type": "READ", + "isAllowed": true + }, + { + "type": "WRITE", 
+ "isAllowed": true + } + ], + "users": [ + "yuwen" + ], + "groups": [], + "conditions": [], + "delegateAdmin": false + } + ], + "denyPolicyItems": [], + "allowExceptions": [], + "denyExceptions": [], + "dataMaskPolicyItems": [], + "rowFilterPolicyItems": [], + "id": 26, + "guid": "81d30427-d164-47f6-beac-5d96bd183132", + "isEnabled": true, + "version": 1 + } + ], + "serviceDef": { + "name": "sqoop", + "implClass": "org.apache.ranger.services.sqoop.RangerServiceSqoop", + "label": "SQOOP", + "description": "SQOOP", + "options": { + "enableDenyAndExceptionsInPolicies": "false" + }, + "configs": [ + { + "itemId": 1, + "name": "username", + "type": "string", + "mandatory": true, + "validationRegEx": "", + "validationMessage": "", + "uiHint": "", + "label": "Username" + }, + { + "itemId": 2, + "name": "sqoop.url", + "type": "string", + "mandatory": true, + "defaultValue": "", + "validationRegEx": "", + "validationMessage": "", + "uiHint": "", + "label": "Sqoop URL" + } + ], + "resources": [ + { + "itemId": 1, + "name": "connector", + "type": "string", + "level": 10, + "mandatory": true, + "lookupSupported": true, + "recursiveSupported": false, + "excludesSupported": false, + "matcher": "org.apache.ranger.plugin.resourcematcher.RangerDefaultResourceMatcher", + "matcherOptions": { + "wildCard": "true", + "ignoreCase": "true" + }, + "validationRegEx": "", + "validationMessage": "", + "uiHint": "", + "label": "Connector", + "description": "Sqoop Connector", + "accessTypeRestrictions": [] + }, + { + "itemId": 2, + "name": "link", + "type": "string", + "level": 10, + "mandatory": true, + "lookupSupported": true, + "recursiveSupported": false, + "excludesSupported": false, + "matcher": "org.apache.ranger.plugin.resourcematcher.RangerDefaultResourceMatcher", + "matcherOptions": { + "wildCard": "true", + "ignoreCase": "true" + }, + "validationRegEx": "", + "validationMessage": "", + "uiHint": "", + "label": "Link", + "description": "Sqoop Link", + "accessTypeRestrictions": [] + 
}, + { + "itemId": 3, + "name": "job", + "type": "string", + "level": 10, + "mandatory": true, + "lookupSupported": true, + "recursiveSupported": false, + "excludesSupported": false, + "matcher": "org.apache.ranger.plugin.resourcematcher.RangerDefaultResourceMatcher", + "matcherOptions": { + "wildCard": "true", + "ignoreCase": "true" + }, + "validationRegEx": "", + "validationMessage": "", + "uiHint": "", + "label": "Job", + "description": "Sqoop Job", + "accessTypeRestrictions": [] + } + ], + "accessTypes": [ + { + "itemId": 1, + "name": "READ", + "label": "READ", + "impliedGrants": [] + }, + { + "itemId": 2, + "name": "WRITE", + "label": "WRITE", + "impliedGrants": [] + } + ], + "policyConditions": [], + "contextEnrichers": [], + "enums": [], + "dataMaskDef": { + "maskTypes": [], + "accessTypes": [], + "resources": [] + }, + "rowFilterDef": { + "accessTypes": [], + "resources": [] + }, + "id": 14, + "guid": "6c63d385-5876-4a4c-ac4a-3b99b50ed600", + "isEnabled": true, + "createTime": "20171023-12:10:59.000-+0800", + "updateTime": "20171023-12:10:59.000-+0800", + "version": 1 + }, + "auditMode": "audit-default" +} \ No newline at end of file http://git-wip-us.apache.org/repos/asf/ranger/blob/44691b55/plugin-sqoop/src/test/resources/sqoop.properties ---------------------------------------------------------------------- diff --git a/plugin-sqoop/src/test/resources/sqoop.properties b/plugin-sqoop/src/test/resources/sqoop.properties new file mode 100644 index 0000000..be8c278 --- /dev/null +++ b/plugin-sqoop/src/test/resources/sqoop.properties @@ -0,0 +1,68 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# +# Sqoop configuration file used by the built-in configuration +# provider: org.apache.sqoop.core.PropertiesConfigurationProvider. +# This file must reside in the system configuration directory +# which is specified by the system property "sqoop.config.dir" +# and must be called sqoop.properties. +# +# NOTE: Tokens specified in this file that are marked by a +# leading and trailing '@' characters should be replaced by +# their appropriate values. For example, the token @LOGDIR@ +# should be replaced appropriately. +# +# The following tokens are used in this configuration file: +# +# LOGDIR +# The absolute path to the directory where system generated +# log files will be kept. 
+# +# BASEDIR +# The absolute path to the directory where Sqoop 2 is installed +# + +# +# Authentication configuration +# +#org.apache.sqoop.security.authentication.type=SIMPLE +#org.apache.sqoop.security.authentication.handler=org.apache.sqoop.security.authentication.SimpleAuthenticationHandler +#org.apache.sqoop.security.authentication.anonymous=true +#org.apache.sqoop.security.authentication.type=KERBEROS +#org.apache.sqoop.security.authentication.handler=org.apache.sqoop.security.authentication.KerberosAuthenticationHandler +#org.apache.sqoop.security.authentication.kerberos.principal=sqoop/_HOST@NOVALOCAL +#org.apache.sqoop.security.authentication.kerberos.keytab=/home/kerberos/sqoop.keytab +#org.apache.sqoop.security.authentication.kerberos.http.principal=HTTP/_HOST@NOVALOCAL +#org.apache.sqoop.security.authentication.kerberos.http.keytab=/home/kerberos/sqoop.keytab +#org.apache.sqoop.security.authentication.enable.doAs=true +#org.apache.sqoop.security.authentication.proxyuser.#USER#.users=* +#org.apache.sqoop.security.authentication.proxyuser.#USER#.groups=* +#org.apache.sqoop.security.authentication.proxyuser.#USER#.hosts=* + +# Default user, default value is "sqoop.anonymous.user" +#org.apache.sqoop.security.authentication.default.user= + +# +# Authorization configuration +# +#org.apache.sqoop.security.authorization.handler=org.apache.sqoop.security.authorization.DefaultAuthorizationHandler +#org.apache.sqoop.security.authorization.access_controller=org.apache.sqoop.security.authorization.DefaultAuthorizationAccessController +#org.apache.sqoop.security.authorization.validator=org.apache.sqoop.security.authorization.DefaultAuthorizationValidator +#org.apache.sqoop.security.authorization.authentication_provider=org.apache.sqoop.security.authorization.DefaultAuthenticationProvider +#org.apache.sqoop.security.authorization.server_name=SqoopServer1 
+org.apache.sqoop.security.authorization.validator=org.apache.ranger.authorization.sqoop.authorizer.RangerSqoopAuthorizer http://git-wip-us.apache.org/repos/asf/ranger/blob/44691b55/plugin-sqoop/src/test/resources/sqoop_bootstrap.properties ---------------------------------------------------------------------- diff --git a/plugin-sqoop/src/test/resources/sqoop_bootstrap.properties b/plugin-sqoop/src/test/resources/sqoop_bootstrap.properties new file mode 100644 index 0000000..d7bfb5e --- /dev/null +++ b/plugin-sqoop/src/test/resources/sqoop_bootstrap.properties @@ -0,0 +1,37 @@ +# +# Licensed to the Apache Software Foundation (ASF) under one or more +# contributor license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright ownership. +# The ASF licenses this file to You under the Apache License, Version 2.0 +# (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +# +# Bootstrap configuration for Sqoop. This file is picked up +# from the directory specified by the system property +# "sqoop.config.dir". Sqoop will not boot up if this property +# is not set, or if there is no file by the name +# "sqoop_bootstrap.properties" in the directory pointed by +# this system property. +# + +# +# Specifies the configuration provider to be used. +# This is a required configuration that must be specified. 
+# The default value for this is: +# org.apache.sqoop.core.PropertiesConfigurationProvider +# The PropertiesConfigurationProvider expects the system +# configuration directory to be specified by the system +# property "sqoop.config.dir", and it must contain a file +# by the name "sqoop.properties". +# +sqoop.config.provider=org.apache.sqoop.core.PropertiesConfigurationProvider \ No newline at end of file
