This is an automated email from the ASF dual-hosted git repository.
jonwei pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/druid.git
The following commit(s) were added to refs/heads/master by this push:
new 553f5c8 Ldap integration tests (#10901)
553f5c8 is described below
commit 553f5c8570970a951cfef35b60dd5cc217999587
Author: zachjsh <[email protected]>
AuthorDate: Tue Feb 23 16:29:57 2021 -0500
Ldap integration tests (#10901)
* Add integration tests for ldap extension
* * refactor
* * add ldap-security integration test to travis
* * fix license error
* * Fix failing other integration test
* * break up large tests
* refactor
* address review comments
* * fix intellij inspections failure
* * remove dead code
---
.travis.yml | 20 +-
integration-tests/docker/docker-compose.base.yml | 17 +
.../docker/docker-compose.ldap-security.yml | 132 ++++
integration-tests/docker/druid.sh | 2 +-
.../docker/environment-configs/common-ldap | 80 +++
.../docker/environment-configs/overlord | 1 -
.../docker/ldap-configs/bootstrap.ldif | 138 +++++
.../docker/test-data/ldap-security-sample-data.sql | 17 +
integration-tests/script/docker_compose_args.sh | 6 +-
.../java/org/apache/druid/tests/TestNGGroup.java | 5 +
.../security/AbstractAuthConfigurationTest.java | 471 +++++++++++++++
.../security/ITBasicAuthConfigurationTest.java | 666 ++++++---------------
.../security/ITBasicAuthLdapConfigurationTest.java | 541 +++++++++++++++++
13 files changed, 1610 insertions(+), 486 deletions(-)
diff --git a/.travis.yml b/.travis.yml
index 9fbc4a6..355e87c 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -487,6 +487,15 @@ jobs:
script: *run_integration_test
after_failure: *integration_test_diags
+ - &integration_ldap_security
+ name: "(Compile=openjdk8, Run=openjdk8) ldap security integration test"
+ stage: Tests - phase 2
+ jdk: openjdk8
+ services: *integration_test_services
+ env: TESTNG_GROUPS='-Dgroups=ldap-security'
JVM_RUNTIME='-Djvm.runtime=8' USE_INDEXER='middleManager'
+ script: *run_integration_test
+ after_failure: *integration_test_diags
+
- &integration_realtime_index
name: "(Compile=openjdk8, Run=openjdk8) realtime index integration test"
stage: Tests - phase 2
@@ -527,13 +536,13 @@ jobs:
stage: Tests - phase 2
jdk: openjdk8
services: *integration_test_services
- env:
TESTNG_GROUPS='-DexcludedGroups=batch-index,input-format,input-source,perfect-rollup-parallel-batch-index,kafka-index,query,query-retry,realtime-index,security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion,kinesis-index,kinesis-data-format,kafka-transactional-index,kafka-index-slow,kafka-transactional-index-slow,kafka-data-format,hadoop-s3-to-s3-deep-storage,hadoop-s3-to-hdfs-deep-storage,hadoop-azure-to-azure-deep-storage,hadoop-azure-to-h
[...]
+ env:
TESTNG_GROUPS='-DexcludedGroups=batch-index,input-format,input-source,perfect-rollup-parallel-batch-index,kafka-index,query,query-retry,realtime-index,security,ldap-security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion,kinesis-index,kinesis-data-format,kafka-transactional-index,kafka-index-slow,kafka-transactional-index-slow,kafka-data-format,hadoop-s3-to-s3-deep-storage,hadoop-s3-to-hdfs-deep-storage,hadoop-azure-to-azure-deep-storage,had
[...]
script: *run_integration_test
after_failure: *integration_test_diags
- <<: *integration_tests
name: "(Compile=openjdk8, Run=openjdk8) other integration tests with
Indexer"
- env:
TESTNG_GROUPS='-DexcludedGroups=batch-index,input-format,input-source,perfect-rollup-parallel-batch-index,kafka-index,query,query-retry,realtime-index,security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion,kinesis-index,kinesis-data-format,kafka-transactional-index,kafka-index-slow,kafka-transactional-index-slow,kafka-data-format,hadoop-s3-to-s3-deep-storage,hadoop-s3-to-hdfs-deep-storage,hadoop-azure-to-azure-deep-storage,hadoop-azure-to-h
[...]
+ env:
TESTNG_GROUPS='-DexcludedGroups=batch-index,input-format,input-source,perfect-rollup-parallel-batch-index,kafka-index,query,query-retry,realtime-index,security,ldap-security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion,kinesis-index,kinesis-data-format,kafka-transactional-index,kafka-index-slow,kafka-transactional-index-slow,kafka-data-format,hadoop-s3-to-s3-deep-storage,hadoop-s3-to-hdfs-deep-storage,hadoop-azure-to-azure-deep-storage,had
[...]
- <<: *integration_tests
name: "(Compile=openjdk8, Run=openjdk8) leadership and high availability
integration tests"
@@ -588,6 +597,11 @@ jobs:
jdk: openjdk8
env: TESTNG_GROUPS='-Dgroups=security' JVM_RUNTIME='-Djvm.runtime=11'
USE_INDEXER='middleManager'
+ - <<: *integration_ldap_security
+ name: "(Compile=openjdk8, Run=openjdk11) ldap security integration test"
+ jdk: openjdk8
+ env: TESTNG_GROUPS='-Dgroups=ldap-security'
JVM_RUNTIME='-Djvm.runtime=11' USE_INDEXER='middleManager'
+
- <<: *integration_realtime_index
name: "(Compile=openjdk8, Run=openjdk11) realtime index integration test"
jdk: openjdk8
@@ -606,7 +620,7 @@ jobs:
- <<: *integration_tests
name: "(Compile=openjdk8, Run=openjdk11) other integration test"
jdk: openjdk8
- env:
TESTNG_GROUPS='-DexcludedGroups=batch-index,input-format,input-source,perfect-rollup-parallel-batch-index,kafka-index,query,query-retry,realtime-index,security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion,kinesis-index,kinesis-data-format,kafka-transactional-index,kafka-index-slow,kafka-transactional-index-slow,kafka-data-format,hadoop-s3-to-s3-deep-storage,hadoop-s3-to-hdfs-deep-storage,hadoop-azure-to-azure-deep-storage,hadoop-azure-to-h
[...]
+ env:
TESTNG_GROUPS='-DexcludedGroups=batch-index,input-format,input-source,perfect-rollup-parallel-batch-index,kafka-index,query,query-retry,realtime-index,security,ldap-security,s3-deep-storage,gcs-deep-storage,azure-deep-storage,hdfs-deep-storage,s3-ingestion,kinesis-index,kinesis-data-format,kafka-transactional-index,kafka-index-slow,kafka-transactional-index-slow,kafka-data-format,hadoop-s3-to-s3-deep-storage,hadoop-s3-to-hdfs-deep-storage,hadoop-azure-to-azure-deep-storage,had
[...]
- <<: *integration_tests
name: "(Compile=openjdk8, Run=openjdk11) leadership and high
availability integration tests"
diff --git a/integration-tests/docker/docker-compose.base.yml
b/integration-tests/docker/docker-compose.base.yml
index dfe6b89..66b8526 100644
--- a/integration-tests/docker/docker-compose.base.yml
+++ b/integration-tests/docker/docker-compose.base.yml
@@ -356,3 +356,20 @@ services:
env_file:
- ./environment-configs/common
- ./environment-configs/router-custom-check-tls
+
+ druid-openldap:
+ image: osixia/openldap:1.4.0
+ container_name: druid-openldap
+ networks:
+ druid-it-net:
+ ipv4_address: 172.172.172.74
+ ports:
+ - 8389:389
+ - 8636:636
+ privileged: true
+ volumes:
+ -
./ldap-configs/bootstrap.ldif:/container/service/slapd/assets/config/bootstrap/ldif/bootstrap.ldif
+ - ${HOME}/shared:/shared
+ env_file:
+ - ./environment-configs/common
+ command: --copy-service
diff --git a/integration-tests/docker/docker-compose.ldap-security.yml
b/integration-tests/docker/docker-compose.ldap-security.yml
new file mode 100644
index 0000000..53fdb1d
--- /dev/null
+++ b/integration-tests/docker/docker-compose.ldap-security.yml
@@ -0,0 +1,132 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+version: "2.2"
+services:
+ druid-openldap:
+ extends:
+ file: docker-compose.base.yml
+ service: druid-openldap
+ env_file:
+ - ./environment-configs/common-ldap
+
+ druid-zookeeper-kafka:
+ extends:
+ file: docker-compose.base.yml
+ service: druid-zookeeper-kafka
+ env_file:
+ - ./environment-configs/common-ldap
+ depends_on:
+ - druid-openldap
+
+ druid-metadata-storage:
+ extends:
+ file: docker-compose.base.yml
+ service: druid-metadata-storage
+ env_file:
+ - ./environment-configs/common-ldap
+ environment:
+ - DRUID_INTEGRATION_TEST_GROUP=${DRUID_INTEGRATION_TEST_GROUP}
+ depends_on:
+ - druid-openldap
+ - druid-zookeeper-kafka
+
+ druid-coordinator:
+ extends:
+ file: docker-compose.base.yml
+ service: druid-coordinator
+ env_file:
+ - ./environment-configs/common-ldap
+ - ./environment-configs/coordinator
+ environment:
+ - DRUID_INTEGRATION_TEST_GROUP=${DRUID_INTEGRATION_TEST_GROUP}
+ depends_on:
+ - druid-openldap
+ - druid-metadata-storage
+ - druid-zookeeper-kafka
+
+ druid-overlord:
+ extends:
+ file: docker-compose.base.yml
+ service: druid-overlord
+ env_file:
+ - ./environment-configs/common-ldap
+ - ./environment-configs/overlord
+ environment:
+ - DRUID_INTEGRATION_TEST_GROUP=${DRUID_INTEGRATION_TEST_GROUP}
+ depends_on:
+ - druid-openldap
+ - druid-coordinator
+ - druid-metadata-storage
+ - druid-zookeeper-kafka
+
+ druid-historical:
+ extends:
+ file: docker-compose.base.yml
+ service: druid-historical
+ env_file:
+ - ./environment-configs/common-ldap
+ - ./environment-configs/historical
+ environment:
+ - DRUID_INTEGRATION_TEST_GROUP=${DRUID_INTEGRATION_TEST_GROUP}
+ depends_on:
+ - druid-openldap
+ - druid-zookeeper-kafka
+
+ druid-middlemanager:
+ extends:
+ file: docker-compose.base.yml
+ service: druid-middlemanager
+ env_file:
+ - ./environment-configs/common-ldap
+ - ./environment-configs/middlemanager
+ environment:
+ - DRUID_INTEGRATION_TEST_GROUP=${DRUID_INTEGRATION_TEST_GROUP}
+ depends_on:
+ - druid-openldap
+ - druid-zookeeper-kafka
+ - druid-overlord
+
+ druid-broker:
+ extends:
+ file: docker-compose.base.yml
+ service: druid-broker
+ env_file:
+ - ./environment-configs/common-ldap
+ - ./environment-configs/broker
+ environment:
+ - DRUID_INTEGRATION_TEST_GROUP=${DRUID_INTEGRATION_TEST_GROUP}
+ depends_on:
+ - druid-openldap
+ - druid-coordinator
+ - druid-zookeeper-kafka
+ - druid-middlemanager
+ - druid-historical
+
+ druid-router:
+ extends:
+ file: docker-compose.base.yml
+ service: druid-router
+ env_file:
+ - ./environment-configs/common-ldap
+ - ./environment-configs/router
+ environment:
+ - DRUID_INTEGRATION_TEST_GROUP=${DRUID_INTEGRATION_TEST_GROUP}
+ depends_on:
+ - druid-openldap
+ - druid-zookeeper-kafka
+ - druid-coordinator
+ - druid-broker
+ - druid-overlord
\ No newline at end of file
diff --git a/integration-tests/docker/druid.sh
b/integration-tests/docker/druid.sh
index 9b756fc..a0b205d 100755
--- a/integration-tests/docker/druid.sh
+++ b/integration-tests/docker/druid.sh
@@ -85,7 +85,7 @@ setupData()
# The "query" and "security" test groups require data to be setup before
running the tests.
# In particular, they require segments to be downloaded from a pre-existing
s3 bucket.
# This is done by using the loadSpec put into metadatastore and s3
credentials set below.
- if [ "$DRUID_INTEGRATION_TEST_GROUP" = "query" ] || [
"$DRUID_INTEGRATION_TEST_GROUP" = "query-retry" ] || [
"$DRUID_INTEGRATION_TEST_GROUP" = "high-availability" ] || [
"$DRUID_INTEGRATION_TEST_GROUP" = "security" ]; then
+ if [ "$DRUID_INTEGRATION_TEST_GROUP" = "query" ] || [
"$DRUID_INTEGRATION_TEST_GROUP" = "query-retry" ] || [
"$DRUID_INTEGRATION_TEST_GROUP" = "high-availability" ] || [
"$DRUID_INTEGRATION_TEST_GROUP" = "security" ] || [
"$DRUID_INTEGRATION_TEST_GROUP" = "ldap-security" ]; then
# touch is needed because OverlayFS's copy-up operation breaks POSIX
standards. See https://github.com/docker/for-linux/issues/72.
find /var/lib/mysql -type f -exec touch {} \; && service mysql start \
&& cat /test-data/${DRUID_INTEGRATION_TEST_GROUP}-sample-data.sql |
mysql -u root druid && /etc/init.d/mysql stop
diff --git a/integration-tests/docker/environment-configs/common-ldap
b/integration-tests/docker/environment-configs/common-ldap
new file mode 100644
index 0000000..243f09d
--- /dev/null
+++ b/integration-tests/docker/environment-configs/common-ldap
@@ -0,0 +1,80 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#
+
+LANG=C.UTF-8
+LANGUAGE=C.UTF-8
+LC_ALL=C.UTF-8
+
+# JAVA OPTS
+COMMON_DRUID_JAVA_OPTS=-Duser.timezone=UTC -Dfile.encoding=UTF-8
-Dlog4j.configurationFile=/shared/docker/lib/log4j2.xml
-XX:+ExitOnOutOfMemoryError -XX:+HeapDumpOnOutOfMemoryError
-XX:HeapDumpPath=/tmp
+DRUID_DEP_LIB_DIR=/shared/hadoop_xml:/shared/docker/lib/*:/usr/local/druid/lib/mysql-connector-java.jar
+
+# Druid configs
+druid_extensions_loadList=[]
+druid_extensions_directory=/shared/docker/extensions
+druid_auth_authenticator_ldap_authorizerName=ldapauth
+druid_auth_authenticator_ldap_initialAdminPassword=priest
+druid_auth_authenticator_ldap_initialInternalClientPassword=warlock
+druid_auth_authenticator_ldap_type=basic
+druid_auth_authenticator_ldap_credentialsValidator_type=ldap
+druid_auth_authenticator_ldap_credentialsValidator_url=ldap://druid-openldap:389
+druid_auth_authenticator_ldap_credentialsValidator_bindUser=cn=admin,dc=example,dc=org
+druid_auth_authenticator_ldap_credentialsValidator_bindPassword=admin
+druid_auth_authenticator_ldap_credentialsValidator_baseDn=ou=Users,dc=example,dc=org
+druid_auth_authenticator_ldap_credentialsValidator_userSearch=(&(uid=%s)(objectClass=inetOrgPerson))
+druid_auth_authenticator_ldap_credentialsValidator_userAttribute=uid
+druid_auth_authenticatorChain=["ldap"]
+druid_auth_authorizer_ldapauth_type=basic
+druid_auth_authorizer_ldapauth_initialAdminUser=admin
+druid_auth_authorizer_ldapauth_initialAdminRole=admin
+druid_auth_authorizer_ldapauth_roleProvider_type=ldap
+druid_auth_authorizers=["ldapauth"]
+druid_client_https_certAlias=druid
+druid_client_https_keyManagerPassword=druid123
+druid_client_https_keyStorePassword=druid123
+druid_client_https_keyStorePath=/tls/server.jks
+druid_client_https_protocol=TLSv1.2
+druid_client_https_trustStoreAlgorithm=PKIX
+druid_client_https_trustStorePassword=druid123
+druid_client_https_trustStorePath=/tls/truststore.jks
+druid_enableTlsPort=true
+druid_escalator_authorizerName=ldapauth
+druid_escalator_internalClientPassword=warlock
+druid_escalator_internalClientUsername=druid_system
+druid_escalator_type=basic
+druid_lookup_numLookupLoadingThreads=1
+druid_server_http_numThreads=20
+# Allow OPTIONS method for AbstractAuthConfigurationTest.testSystemSchemaAccess
+druid_server_http_allowedHttpMethods=["OPTIONS"]
+druid_server_https_certAlias=druid
+druid_server_https_keyManagerPassword=druid123
+druid_server_https_keyStorePassword=druid123
+druid_server_https_keyStorePath=/tls/server.jks
+druid_server_https_keyStoreType=jks
+druid_server_https_requireClientCertificate=true
+druid_server_https_trustStoreAlgorithm=PKIX
+druid_server_https_trustStorePassword=druid123
+druid_server_https_trustStorePath=/tls/truststore.jks
+druid_server_https_validateHostnames=true
+druid_zk_service_host=druid-zookeeper-kafka
+druid_auth_basic_common_maxSyncRetries=20
+druid_indexer_logs_directory=/shared/tasklogs
+druid_sql_enable=true
+druid_extensions_hadoopDependenciesDir=/shared/hadoop-dependencies
+druid_request_logging_type=slf4j
\ No newline at end of file
diff --git a/integration-tests/docker/environment-configs/overlord
b/integration-tests/docker/environment-configs/overlord
index 7d9b1e1..cb4f5f2 100644
--- a/integration-tests/docker/environment-configs/overlord
+++ b/integration-tests/docker/environment-configs/overlord
@@ -31,6 +31,5 @@ druid_metadata_storage_connector_user=druid
druid_metadata_storage_connector_password=diurd
druid_indexer_storage_type=metadata
druid_indexer_runner_type=remote
-druid_auth_authenticatorChain=["basic"]
druid_auth_basic_common_cacheDirectory=/tmp/authCache/overlord
druid_server_https_crlPath=/tls/revocations.crl
diff --git a/integration-tests/docker/ldap-configs/bootstrap.ldif
b/integration-tests/docker/ldap-configs/bootstrap.ldif
new file mode 100644
index 0000000..05591c0
--- /dev/null
+++ b/integration-tests/docker/ldap-configs/bootstrap.ldif
@@ -0,0 +1,138 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+dn: ou=Users,dc=example,dc=org
+objectClass: top
+objectClass: organizationalUnit
+ou: Users
+
+dn: ou=Groups,dc=example,dc=org
+objectClass: top
+objectClass: organizationalUnit
+ou: Groups
+
+dn: uid=admin,ou=Users,dc=example,dc=org
+uid: admin
+cn: admin
+sn: admin
+objectClass: top
+objectClass: posixAccount
+objectClass: inetOrgPerson
+homeDirectory: /home/admin
+uidNumber: 1
+gidNumber: 1
+userPassword: priest
+
+dn: uid=druid_system,ou=Users,dc=example,dc=org
+uid: druid_system
+cn: druid_system
+sn: druid_system
+objectClass: top
+objectClass: posixAccount
+objectClass: inetOrgPerson
+homeDirectory: /home/druid_system
+uidNumber: 2
+gidNumber: 2
+userPassword: warlock
+
+dn: cn=admin,ou=Groups,dc=example,dc=org
+objectClass: groupOfUniqueNames
+cn: admin
+description: Admin users
+uniqueMember: uid=admin,ou=Users,dc=example,dc=org
+uniqueMember: uid=druid_system,ou=Users,dc=example,dc=org
+
+dn: uid=datasourceOnlyUser,ou=Users,dc=example,dc=org
+uid: datasourceOnlyUser
+cn: datasourceOnlyUser
+sn: datasourceOnlyUser
+objectClass: top
+objectClass: posixAccount
+objectClass: inetOrgPerson
+homeDirectory: /home/datasourceOnlyUser
+uidNumber: 3
+gidNumber: 3
+userPassword: helloworld
+
+dn: cn=datasourceOnlyGroup,ou=Groups,dc=example,dc=org
+objectClass: groupOfUniqueNames
+cn: datasourceOnlyGroup
+description: datasourceOnlyGroup users
+uniqueMember: uid=datasourceOnlyUser,ou=Users,dc=example,dc=org
+
+dn: uid=datasourceWithStateUser,ou=Users,dc=example,dc=org
+uid: datasourceWithStateUser
+cn: datasourceWithStateUser
+sn: datasourceWithStateUser
+objectClass: top
+objectClass: posixAccount
+objectClass: inetOrgPerson
+homeDirectory: /home/datasourceWithStateUser
+uidNumber: 4
+gidNumber: 4
+userPassword: helloworld
+
+dn: cn=datasourceWithStateGroup,ou=Groups,dc=example,dc=org
+objectClass: groupOfUniqueNames
+cn: datasourceWithStateGroup
+description: datasourceWithStateGroup users
+uniqueMember: uid=datasourceWithStateUser,ou=Users,dc=example,dc=org
+
+dn: uid=stateOnlyUser,ou=Users,dc=example,dc=org
+uid: stateOnlyUser
+cn: stateOnlyUser
+sn: stateOnlyUser
+objectClass: top
+objectClass: posixAccount
+objectClass: inetOrgPerson
+homeDirectory: /home/stateOnlyUser
+uidNumber: 5
+gidNumber: 5
+userPassword: helloworld
+
+dn: cn=stateOnlyGroup,ou=Groups,dc=example,dc=org
+objectClass: groupOfUniqueNames
+cn: stateOnlyGroup
+description: stateOnlyGroup users
+uniqueMember: uid=stateOnlyUser,ou=Users,dc=example,dc=org
+
+dn: uid=druid,ou=Users,dc=example,dc=org
+uid: druid
+cn: druid
+sn: druid
+objectClass: top
+objectClass: posixAccount
+objectClass: inetOrgPerson
+homeDirectory: /home/druid
+uidNumber: 6
+gidNumber: 6
+userPassword: helloworld
+
+dn: cn=druidGroup,ou=Groups,dc=example,dc=org
+objectClass: groupOfUniqueNames
+cn: druidGroup
+description: druidGroup users
+uniqueMember: uid=druid,ou=Users,dc=example,dc=org
+
+dn: uid=stateOnlyNoLdapGroup,ou=Users,dc=example,dc=org
+uid: stateOnlyNoLdapGroup
+cn: stateOnlyNoLdapGroup
+sn: stateOnlyNoLdapGroup
+objectClass: top
+objectClass: posixAccount
+objectClass: inetOrgPerson
+homeDirectory: /home/stateOnlyNoLdapGroup
+uidNumber: 7
+gidNumber: 7
+userPassword: helloworld
diff --git a/integration-tests/docker/test-data/ldap-security-sample-data.sql
b/integration-tests/docker/test-data/ldap-security-sample-data.sql
new file mode 100644
index 0000000..f9edf75
--- /dev/null
+++ b/integration-tests/docker/test-data/ldap-security-sample-data.sql
@@ -0,0 +1,17 @@
+-- Licensed to the Apache Software Foundation (ASF) under one or more
+-- contributor license agreements. See the NOTICE file distributed with
+-- this work for additional information regarding copyright ownership.
+-- The ASF licenses this file to You under the Apache License, Version 2.0
+-- (the "License"); you may not use this file except in compliance with
+-- the License. You may obtain a copy of the License at
+--
+-- http://www.apache.org/licenses/LICENSE-2.0
+--
+-- Unless required by applicable law or agreed to in writing, software
+-- distributed under the License is distributed on an "AS IS" BASIS,
+-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+-- See the License for the specific language governing permissions and
+-- limitations under the License.
+
+INSERT INTO druid_tasks (id, created_date, datasource, payload,
status_payload, active) VALUES ('index_auth_test_2030-04-30T01:13:31.893Z',
'2030-04-30T01:13:31.893Z', 'auth_test',
'{\"id\":\"index_auth_test_2030-04-30T01:13:31.893Z\",\"created_date\":\"2030-04-30T01:13:31.893Z\",\"datasource\":\"auth_test\",\"active\":0}',
'{\"id\":\"index_auth_test_2030-04-30T01:13:31.893Z\",\"status\":\"SUCCESS\",\"duration\":1}',
0);
+INSERT INTO druid_segments
(id,dataSource,created_date,start,end,partitioned,version,used,payload) VALUES
('auth_test_2012-12-29T00:00:00.000Z_2013-01-10T08:00:00.000Z_2013-01-10T08:13:47.830Z_v9','auth_test','2013-03-15T20:49:52.348Z','2012-12-29T00:00:00.000Z','2013-01-10T08:00:00.000Z',0,'2013-01-10T08:13:47.830Z_v9',1,'{\"dataSource\":\"auth_test\",\"interval\":\"2012-12-29T00:00:00.000Z/2013-01-10T08:00:00.000Z\",\"version\":\"2013-01-10T08:13:47.830Z_v9\",\"loadSpec\":{\"type\":\"s
[...]
diff --git a/integration-tests/script/docker_compose_args.sh
b/integration-tests/script/docker_compose_args.sh
index 73f3261..e43f88d 100644
--- a/integration-tests/script/docker_compose_args.sh
+++ b/integration-tests/script/docker_compose_args.sh
@@ -32,7 +32,7 @@ getComposeArgs()
if [ "$DRUID_INTEGRATION_TEST_INDEXER" = "indexer" ]
then
# Sanity check: cannot combine CliIndexer tests with security,
query-retry tests
- if [ "$DRUID_INTEGRATION_TEST_GROUP" = "security" ] || [
"$DRUID_INTEGRATION_TEST_GROUP" = "query-retry" ] || [
"$DRUID_INTEGRATION_TEST_GROUP" = "high-availability" ]
+ if [ "$DRUID_INTEGRATION_TEST_GROUP" = "security" ] || [
"$DRUID_INTEGRATION_TEST_GROUP" = "ldap-security" ] || [
"$DRUID_INTEGRATION_TEST_GROUP" = "query-retry" ] || [
"$DRUID_INTEGRATION_TEST_GROUP" = "high-availability" ]
then
echo "Cannot run test group '$DRUID_INTEGRATION_TEST_GROUP' with
CliIndexer"
exit 1
@@ -44,6 +44,10 @@ getComposeArgs()
then
# default + additional druid router (custom-check-tls, permissive-tls,
no-client-auth-tls)
echo "-f ${DOCKERDIR}/docker-compose.yml -f
${DOCKERDIR}/docker-compose.security.yml"
+ elif [ "$DRUID_INTEGRATION_TEST_GROUP" = "ldap-security" ]
+ then
+ # default + druid-openldap service providing LDAP-backed
authentication/authorization
+ echo "-f ${DOCKERDIR}/docker-compose.yml -f
${DOCKERDIR}/docker-compose.ldap-security.yml"
elif [ "$DRUID_INTEGRATION_TEST_GROUP" = "query-retry" ]
then
# default + additional historical modified for query retry test
diff --git
a/integration-tests/src/test/java/org/apache/druid/tests/TestNGGroup.java
b/integration-tests/src/test/java/org/apache/druid/tests/TestNGGroup.java
index a59c0d4..7f8f231 100644
--- a/integration-tests/src/test/java/org/apache/druid/tests/TestNGGroup.java
+++ b/integration-tests/src/test/java/org/apache/druid/tests/TestNGGroup.java
@@ -64,6 +64,11 @@ public class TestNGGroup
public static final String SECURITY = "security";
/**
+ * This group can only be run individually using -Dgroups=ldap-security
since it requires specific test data setup.
+ */
+ public static final String LDAP_SECURITY = "ldap-security";
+
+ /**
* This group is not part of CI. To run this group, s3 configs/credentials
for your s3 must be provided in a file.
* The path of the file must then be passed to mvn with
-Doverride.config.path=<PATH_TO_FILE>
* See integration-tests/docker/environment-configs/override-examples/s3 for
env vars to provide.
diff --git
a/integration-tests/src/test/java/org/apache/druid/tests/security/AbstractAuthConfigurationTest.java
b/integration-tests/src/test/java/org/apache/druid/tests/security/AbstractAuthConfigurationTest.java
new file mode 100644
index 0000000..65e8005
--- /dev/null
+++
b/integration-tests/src/test/java/org/apache/druid/tests/security/AbstractAuthConfigurationTest.java
@@ -0,0 +1,471 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.tests.security;
+
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Lists;
+import com.google.inject.Inject;
+import org.apache.calcite.avatica.AvaticaSqlException;
+import org.apache.druid.common.config.NullHandling;
+import org.apache.druid.guice.annotations.Client;
+import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.java.util.common.jackson.JacksonUtils;
+import org.apache.druid.java.util.common.logger.Logger;
+import org.apache.druid.java.util.http.client.CredentialedHttpClient;
+import org.apache.druid.java.util.http.client.HttpClient;
+import org.apache.druid.java.util.http.client.auth.BasicCredentials;
+import org.apache.druid.java.util.http.client.response.StatusResponseHolder;
+import org.apache.druid.sql.avatica.DruidAvaticaHandler;
+import org.apache.druid.testing.IntegrationTestingConfig;
+import org.apache.druid.testing.clients.CoordinatorResourceTestClient;
+import org.apache.druid.testing.utils.HttpUtil;
+import org.apache.druid.testing.utils.TestQueryHelper;
+import org.apache.druid.tests.indexer.AbstractIndexerTest;
+import org.jboss.netty.handler.codec.http.HttpMethod;
+import org.jboss.netty.handler.codec.http.HttpResponseStatus;
+import org.testng.Assert;
+
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.Statement;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Properties;
+
+public abstract class AbstractAuthConfigurationTest
+{
+ private static final Logger LOG = new
Logger(AbstractAuthConfigurationTest.class);
+
+ static final TypeReference<List<Map<String, Object>>>
SYS_SCHEMA_RESULTS_TYPE_REFERENCE =
+ new TypeReference<List<Map<String, Object>>>()
+ {
+ };
+
+ static final String SYSTEM_SCHEMA_SEGMENTS_RESULTS_RESOURCE =
+ "/results/auth_test_sys_schema_segments.json";
+ static final String SYSTEM_SCHEMA_SERVER_SEGMENTS_RESULTS_RESOURCE =
+ "/results/auth_test_sys_schema_server_segments.json";
+ static final String SYSTEM_SCHEMA_SERVERS_RESULTS_RESOURCE =
+ "/results/auth_test_sys_schema_servers.json";
+ static final String SYSTEM_SCHEMA_TASKS_RESULTS_RESOURCE =
+ "/results/auth_test_sys_schema_tasks.json";
+
+ static final String SYS_SCHEMA_SEGMENTS_QUERY =
+ "SELECT * FROM sys.segments WHERE datasource IN ('auth_test')";
+
+ static final String SYS_SCHEMA_SERVERS_QUERY =
+ "SELECT * FROM sys.servers WHERE tier IS NOT NULL";
+
+ static final String SYS_SCHEMA_SERVER_SEGMENTS_QUERY =
+ "SELECT * FROM sys.server_segments WHERE segment_id LIKE 'auth_test%'";
+
+ static final String SYS_SCHEMA_TASKS_QUERY =
+ "SELECT * FROM sys.tasks WHERE datasource IN ('auth_test')";
+
+ private static final String INVALID_NAME = "invalid%2Fname";
+
+ List<Map<String, Object>> adminSegments;
+ List<Map<String, Object>> adminTasks;
+ List<Map<String, Object>> adminServers;
+ List<Map<String, Object>> adminServerSegments;
+
+ @Inject
+ IntegrationTestingConfig config;
+
+ @Inject
+ ObjectMapper jsonMapper;
+
+ @Inject
+ @Client
+ HttpClient httpClient;
+
+ @Inject
+ CoordinatorResourceTestClient coordinatorClient;
+
+ HttpClient adminClient;
+ HttpClient datasourceOnlyUserClient;
+ HttpClient datasourceWithStateUserClient;
+ HttpClient stateOnlyUserClient;
+ HttpClient internalSystemClient;
+
+
+ void checkNodeAccess(HttpClient httpClient)
+ {
+ HttpUtil.makeRequest(httpClient, HttpMethod.GET,
config.getCoordinatorUrl() + "/status", null);
+ HttpUtil.makeRequest(httpClient, HttpMethod.GET, config.getOverlordUrl() +
"/status", null);
+ HttpUtil.makeRequest(httpClient, HttpMethod.GET, config.getBrokerUrl() +
"/status", null);
+ HttpUtil.makeRequest(httpClient, HttpMethod.GET, config.getHistoricalUrl()
+ "/status", null);
+ HttpUtil.makeRequest(httpClient, HttpMethod.GET, config.getRouterUrl() +
"/status", null);
+ }
+
+ void checkLoadStatus(HttpClient httpClient) throws Exception
+ {
+ checkLoadStatusSingle(httpClient, config.getCoordinatorUrl());
+ checkLoadStatusSingle(httpClient, config.getOverlordUrl());
+ checkLoadStatusSingle(httpClient, config.getBrokerUrl());
+ checkLoadStatusSingle(httpClient, config.getHistoricalUrl());
+ checkLoadStatusSingle(httpClient, config.getRouterUrl());
+ }
+
+ void testOptionsRequests(HttpClient httpClient)
+ {
+ HttpUtil.makeRequest(httpClient, HttpMethod.OPTIONS,
config.getCoordinatorUrl() + "/status", null);
+ HttpUtil.makeRequest(httpClient, HttpMethod.OPTIONS,
config.getOverlordUrl() + "/status", null);
+ HttpUtil.makeRequest(httpClient, HttpMethod.OPTIONS, config.getBrokerUrl()
+ "/status", null);
+ HttpUtil.makeRequest(httpClient, HttpMethod.OPTIONS,
config.getHistoricalUrl() + "/status", null);
+ HttpUtil.makeRequest(httpClient, HttpMethod.OPTIONS, config.getRouterUrl()
+ "/status", null);
+ }
+
+ void checkUnsecuredCoordinatorLoadQueuePath(HttpClient client)
+ {
+ HttpUtil.makeRequest(client, HttpMethod.GET, config.getCoordinatorUrl() +
"/druid/coordinator/v1/loadqueue", null);
+ }
+
+ void testAvaticaQuery(String url)
+ {
+ LOG.info("URL: " + url);
+ try {
+ Properties connectionProperties = new Properties();
+ connectionProperties.setProperty("user", "admin");
+ connectionProperties.setProperty("password", "priest");
+ Connection connection = DriverManager.getConnection(url,
connectionProperties);
+ Statement statement = connection.createStatement();
+ statement.setMaxRows(450);
+ String query = "SELECT * FROM INFORMATION_SCHEMA.COLUMNS";
+ ResultSet resultSet = statement.executeQuery(query);
+ Assert.assertTrue(resultSet.next());
+ statement.close();
+ connection.close();
+ }
+ catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ void testAvaticaAuthFailure(String url) throws Exception
+ {
+ LOG.info("URL: " + url);
+ try {
+ Properties connectionProperties = new Properties();
+ connectionProperties.setProperty("user", "admin");
+ connectionProperties.setProperty("password", "wrongpassword");
+ Connection connection = DriverManager.getConnection(url,
connectionProperties);
+ Statement statement = connection.createStatement();
+ statement.setMaxRows(450);
+ String query = "SELECT * FROM INFORMATION_SCHEMA.COLUMNS";
+ statement.executeQuery(query);
+ }
+ catch (AvaticaSqlException ase) {
+ Assert.assertEquals(
+ ase.getErrorMessage(),
+ getExpectedAvaticaAuthError()
+ );
+ return;
+ }
+ Assert.fail("Test failed, did not get AvaticaSqlException.");
+ }
+
+ private void checkLoadStatusSingle(
+ HttpClient httpClient,
+ String baseUrl) throws Exception
+ {
+ StatusResponseHolder holder = HttpUtil.makeRequest(
+ httpClient,
+ HttpMethod.GET,
+ baseUrl + "/druid-ext/basic-security/authentication/loadStatus",
+ null
+ );
+ String content = holder.getContent();
+ Map<String, Boolean> loadStatus = jsonMapper.readValue(content,
JacksonUtils.TYPE_REFERENCE_MAP_STRING_BOOLEAN);
+
+ String authenticatorName = getAuthenticatorName();
+ Assert.assertNotNull(loadStatus.get(getAuthenticatorName()));
+ Assert.assertTrue(loadStatus.get(authenticatorName));
+
+ holder = HttpUtil.makeRequest(
+ httpClient,
+ HttpMethod.GET,
+ baseUrl + "/druid-ext/basic-security/authorization/loadStatus",
+ null
+ );
+ content = holder.getContent();
+ loadStatus = jsonMapper.readValue(content,
JacksonUtils.TYPE_REFERENCE_MAP_STRING_BOOLEAN);
+
+ String authorizerName = getAuthorizerName();
+ Assert.assertNotNull(loadStatus.get(authorizerName));
+ Assert.assertTrue(loadStatus.get(authorizerName));
+ }
+
+ StatusResponseHolder makeSQLQueryRequest(
+ HttpClient httpClient,
+ String query,
+ HttpResponseStatus expectedStatus
+ ) throws Exception
+ {
+ Map<String, Object> queryMap = ImmutableMap.of(
+ "query", query
+ );
+ return HttpUtil.makeRequestWithExpectedStatus(
+ httpClient,
+ HttpMethod.POST,
+ config.getBrokerUrl() + "/druid/v2/sql",
+ jsonMapper.writeValueAsBytes(queryMap),
+ expectedStatus
+ );
+ }
+
+ void verifySystemSchemaQueryBase(
+ HttpClient client,
+ String query,
+ List<Map<String, Object>> expectedResults,
+ boolean isServerQuery
+ ) throws Exception
+ {
+ StatusResponseHolder responseHolder = makeSQLQueryRequest(client, query,
HttpResponseStatus.OK);
+ String content = responseHolder.getContent();
+ List<Map<String, Object>> responseMap = jsonMapper.readValue(content,
SYS_SCHEMA_RESULTS_TYPE_REFERENCE);
+ if (isServerQuery) {
+ responseMap = getServersWithoutCurrentSize(responseMap);
+ }
+ Assert.assertEquals(responseMap, expectedResults);
+ }
+
+ void verifySystemSchemaQuery(
+ HttpClient client,
+ String query,
+ List<Map<String, Object>> expectedResults
+ ) throws Exception
+ {
+ verifySystemSchemaQueryBase(client, query, expectedResults, false);
+ }
+
+ void verifySystemSchemaServerQuery(
+ HttpClient client,
+ String query,
+ List<Map<String, Object>> expectedResults
+ ) throws Exception
+ {
+ verifySystemSchemaQueryBase(client, query, expectedResults, true);
+ }
+
+ void verifySystemSchemaQueryFailure(
+ HttpClient client,
+ String query,
+ HttpResponseStatus expectedErrorStatus,
+ String expectedErrorMessage
+ ) throws Exception
+ {
+ StatusResponseHolder responseHolder = makeSQLQueryRequest(client, query,
expectedErrorStatus);
+ Assert.assertEquals(responseHolder.getStatus(), expectedErrorStatus);
+ Assert.assertEquals(responseHolder.getContent(), expectedErrorMessage);
+ }
+
+ String getBrokerAvacticaUrl()
+ {
+ return "jdbc:avatica:remote:url=" + config.getBrokerUrl() +
DruidAvaticaHandler.AVATICA_PATH;
+ }
+
+ String getRouterAvacticaUrl()
+ {
+ return "jdbc:avatica:remote:url=" + config.getRouterUrl() +
DruidAvaticaHandler.AVATICA_PATH;
+ }
+
+ void verifyAdminOptionsRequest()
+ {
+ HttpClient adminClient = new CredentialedHttpClient(
+ new BasicCredentials("admin", "priest"),
+ httpClient
+ );
+ testOptionsRequests(adminClient);
+ }
+
+ void verifyAuthenticatioInvalidAuthNameFails()
+ {
+ verifyInvalidAuthNameFails(StringUtils.format(
+ "%s/druid-ext/basic-security/authentication/listen/%s",
+ config.getCoordinatorUrl(),
+ INVALID_NAME
+ ));
+ }
+
+ void verifyAuthorizationInvalidAuthNameFails()
+ {
+ verifyInvalidAuthNameFails(StringUtils.format(
+ "%s/druid-ext/basic-security/authorization/listen/users/%s",
+ config.getCoordinatorUrl(),
+ INVALID_NAME
+ ));
+ }
+
+ void verifyGroupMappingsInvalidAuthNameFails()
+ {
+ verifyInvalidAuthNameFails(StringUtils.format(
+ "%s/druid-ext/basic-security/authorization/listen/groupMappings/%s",
+ config.getCoordinatorUrl(),
+ INVALID_NAME
+ ));
+ }
+
+ void verifyInvalidAuthNameFails(String endpoint)
+ {
+ HttpClient adminClient = new CredentialedHttpClient(
+ new BasicCredentials("admin", "priest"),
+ httpClient
+ );
+
+ HttpUtil.makeRequestWithExpectedStatus(
+ adminClient,
+ HttpMethod.POST,
+ endpoint,
+ "SERIALIZED_DATA".getBytes(StandardCharsets.UTF_8),
+ HttpResponseStatus.INTERNAL_SERVER_ERROR
+ );
+ }
+
+ void verifyMaliciousUser()
+ {
+ String maliciousUsername = "<script>alert('hello')</script>";
+ HttpClient maliciousClient = new CredentialedHttpClient(
+ new BasicCredentials(maliciousUsername, "noPass"),
+ httpClient
+ );
+ StatusResponseHolder responseHolder =
HttpUtil.makeRequestWithExpectedStatus(
+ maliciousClient,
+ HttpMethod.GET,
+ config.getBrokerUrl() + "/status",
+ null,
+ HttpResponseStatus.UNAUTHORIZED
+ );
+ String responseContent = responseHolder.getContent();
+
Assert.assertTrue(responseContent.contains("<tr><th>MESSAGE:</th><td>Unauthorized</td></tr>"));
+ Assert.assertFalse(responseContent.contains(maliciousUsername));
+ }
+
+ void setupHttpClients() throws Exception
+ {
+ setupCommonHttpClients();
+ setupTestSpecificHttpClients();
+ }
+
+ abstract void setupUsers() throws Exception;
+
+ void setupCommonHttpClients()
+ {
+ adminClient = new CredentialedHttpClient(
+ new BasicCredentials("admin", "priest"),
+ httpClient
+ );
+
+ datasourceOnlyUserClient = new CredentialedHttpClient(
+ new BasicCredentials("datasourceOnlyUser", "helloworld"),
+ httpClient
+ );
+
+ datasourceWithStateUserClient = new CredentialedHttpClient(
+ new BasicCredentials("datasourceWithStateUser", "helloworld"),
+ httpClient
+ );
+
+ stateOnlyUserClient = new CredentialedHttpClient(
+ new BasicCredentials("stateOnlyUser", "helloworld"),
+ httpClient
+ );
+
+ internalSystemClient = new CredentialedHttpClient(
+ new BasicCredentials("druid_system", "warlock"),
+ httpClient
+ );
+ }
+
+ abstract void setupTestSpecificHttpClients() throws Exception;
+
+ void setExpectedSystemSchemaObjects() throws IOException
+ {
+ // initial setup is done now; run the system schema response content tests
+ adminSegments = jsonMapper.readValue(
+
TestQueryHelper.class.getResourceAsStream(SYSTEM_SCHEMA_SEGMENTS_RESULTS_RESOURCE),
+ SYS_SCHEMA_RESULTS_TYPE_REFERENCE
+ );
+
+ adminTasks = jsonMapper.readValue(
+
TestQueryHelper.class.getResourceAsStream(SYSTEM_SCHEMA_TASKS_RESULTS_RESOURCE),
+ SYS_SCHEMA_RESULTS_TYPE_REFERENCE
+ );
+
+ adminServers = getServersWithoutCurrentSize(
+ jsonMapper.readValue(
+ fillServersTemplate(
+ config,
+
AbstractIndexerTest.getResourceAsString(SYSTEM_SCHEMA_SERVERS_RESULTS_RESOURCE)
+ ),
+ SYS_SCHEMA_RESULTS_TYPE_REFERENCE
+ )
+ );
+
+ adminServerSegments = jsonMapper.readValue(
+ fillSegementServersTemplate(
+ config,
+
AbstractIndexerTest.getResourceAsString(SYSTEM_SCHEMA_SERVER_SEGMENTS_RESULTS_RESOURCE)
+ ),
+ SYS_SCHEMA_RESULTS_TYPE_REFERENCE
+ );
+ }
+
+ /**
+ * curr_size on historicals changes because cluster state is not isolated
across different
+ * integration tests; zero it out for consistent test results
+ */
+ static List<Map<String, Object>>
getServersWithoutCurrentSize(List<Map<String, Object>> servers)
+ {
+ return Lists.transform(
+ servers,
+ (server) -> {
+ Map<String, Object> newServer = new HashMap<>(server);
+ newServer.put("curr_size", 0);
+ return newServer;
+ }
+ );
+ }
+
+ static String fillSegementServersTemplate(IntegrationTestingConfig config,
String template)
+ {
+ return StringUtils.replace(template, "%%HISTORICAL%%",
config.getHistoricalInternalHost());
+ }
+
+ static String fillServersTemplate(IntegrationTestingConfig config, String
template)
+ {
+ String json = StringUtils.replace(template, "%%HISTORICAL%%",
config.getHistoricalInternalHost());
+ json = StringUtils.replace(json, "%%BROKER%%",
config.getBrokerInternalHost());
+ json = StringUtils.replace(json, "%%NON_LEADER%%",
String.valueOf(NullHandling.defaultLongValue()));
+ return json;
+ }
+
+ abstract String getAuthenticatorName();
+
+ abstract String getAuthorizerName();
+
+ abstract String getExpectedAvaticaAuthError();
+}
diff --git
a/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthConfigurationTest.java
b/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthConfigurationTest.java
index 4d674b9..37cf349 100644
---
a/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthConfigurationTest.java
+++
b/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthConfigurationTest.java
@@ -19,227 +19,63 @@
package org.apache.druid.tests.security;
-import com.fasterxml.jackson.core.type.TypeReference;
-import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Lists;
-import com.google.inject.Inject;
-import org.apache.calcite.avatica.AvaticaSqlException;
-import org.apache.druid.common.config.NullHandling;
-import org.apache.druid.guice.annotations.Client;
import org.apache.druid.java.util.common.StringUtils;
-import org.apache.druid.java.util.common.jackson.JacksonUtils;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.java.util.http.client.CredentialedHttpClient;
import org.apache.druid.java.util.http.client.HttpClient;
import org.apache.druid.java.util.http.client.auth.BasicCredentials;
-import org.apache.druid.java.util.http.client.response.StatusResponseHolder;
import
org.apache.druid.security.basic.authentication.entity.BasicAuthenticatorCredentialUpdate;
import org.apache.druid.server.security.Action;
import org.apache.druid.server.security.Resource;
import org.apache.druid.server.security.ResourceAction;
import org.apache.druid.server.security.ResourceType;
-import org.apache.druid.sql.avatica.DruidAvaticaHandler;
-import org.apache.druid.testing.IntegrationTestingConfig;
-import org.apache.druid.testing.clients.CoordinatorResourceTestClient;
import org.apache.druid.testing.guice.DruidTestModuleFactory;
import org.apache.druid.testing.utils.HttpUtil;
import org.apache.druid.testing.utils.ITRetryUtil;
-import org.apache.druid.testing.utils.TestQueryHelper;
import org.apache.druid.tests.TestNGGroup;
-import org.apache.druid.tests.indexer.AbstractIndexerTest;
import org.jboss.netty.handler.codec.http.HttpMethod;
import org.jboss.netty.handler.codec.http.HttpResponseStatus;
-import org.testng.Assert;
-import org.testng.annotations.BeforeMethod;
+import org.testng.annotations.BeforeClass;
import org.testng.annotations.Guice;
import org.testng.annotations.Test;
-import java.nio.charset.StandardCharsets;
-import java.sql.Connection;
-import java.sql.DriverManager;
-import java.sql.ResultSet;
-import java.sql.Statement;
import java.util.Collections;
-import java.util.HashMap;
import java.util.List;
-import java.util.Map;
-import java.util.Properties;
import java.util.stream.Collectors;
@Test(groups = TestNGGroup.SECURITY)
@Guice(moduleFactory = DruidTestModuleFactory.class)
-public class ITBasicAuthConfigurationTest
+public class ITBasicAuthConfigurationTest extends AbstractAuthConfigurationTest
{
private static final Logger LOG = new
Logger(ITBasicAuthConfigurationTest.class);
- private static final TypeReference<List<Map<String, Object>>>
SYS_SCHEMA_RESULTS_TYPE_REFERENCE =
- new TypeReference<List<Map<String, Object>>>()
- {
- };
+ private static final String BASIC_AUTHENTICATOR = "basic";
+ private static final String BASIC_AUTHORIZER = "basic";
- private static final String SYSTEM_SCHEMA_SEGMENTS_RESULTS_RESOURCE =
- "/results/auth_test_sys_schema_segments.json";
- private static final String SYSTEM_SCHEMA_SERVER_SEGMENTS_RESULTS_RESOURCE =
- "/results/auth_test_sys_schema_server_segments.json";
- private static final String SYSTEM_SCHEMA_SERVERS_RESULTS_RESOURCE =
- "/results/auth_test_sys_schema_servers.json";
- private static final String SYSTEM_SCHEMA_TASKS_RESULTS_RESOURCE =
- "/results/auth_test_sys_schema_tasks.json";
+ private static final String EXPECTED_AVATICA_AUTH_ERROR = "Error while
executing SQL \"SELECT * FROM INFORMATION_SCHEMA.COLUMNS\": Remote driver
error: BasicSecurityAuthenticationException: User metadata store authentication
failed.";
- private static final String SYS_SCHEMA_SEGMENTS_QUERY =
- "SELECT * FROM sys.segments WHERE datasource IN ('auth_test')";
+ private HttpClient druid99;
- private static final String SYS_SCHEMA_SERVERS_QUERY =
- "SELECT * FROM sys.servers WHERE tier IS NOT NULL";
-
- private static final String SYS_SCHEMA_SERVER_SEGMENTS_QUERY =
- "SELECT * FROM sys.server_segments WHERE segment_id LIKE 'auth_test%'";
-
- private static final String SYS_SCHEMA_TASKS_QUERY =
- "SELECT * FROM sys.tasks WHERE datasource IN ('auth_test')";
-
- @Inject
- IntegrationTestingConfig config;
-
- @Inject
- ObjectMapper jsonMapper;
-
- @Inject
- @Client
- HttpClient httpClient;
-
- @Inject
- private CoordinatorResourceTestClient coordinatorClient;
-
- @BeforeMethod
- public void before()
+ @BeforeClass
+ public void before() throws Exception
{
// ensure that auth_test segments are loaded completely, we use them for
testing system schema tables
ITRetryUtil.retryUntilTrue(
() -> coordinatorClient.areSegmentsLoaded("auth_test"), "auth_test
segment load"
);
+
+ setupHttpClients();
+ setupUsers();
+ setExpectedSystemSchemaObjects();
}
@Test
- public void testSystemSchemaAccess() throws Exception
+ public void test_systemSchemaAccess_admin() throws Exception
{
- HttpClient adminClient = new CredentialedHttpClient(
- new BasicCredentials("admin", "priest"),
- httpClient
- );
-
// check that admin access works on all nodes
checkNodeAccess(adminClient);
- // create a new user+role that can only read 'auth_test'
- List<ResourceAction> readDatasourceOnlyPermissions =
Collections.singletonList(
- new ResourceAction(
- new Resource("auth_test", ResourceType.DATASOURCE),
- Action.READ
- )
- );
- createUserAndRoleWithPermissions(
- adminClient,
- "datasourceOnlyUser",
- "helloworld",
- "datasourceOnlyRole",
- readDatasourceOnlyPermissions
- );
- HttpClient datasourceOnlyUserClient = new CredentialedHttpClient(
- new BasicCredentials("datasourceOnlyUser", "helloworld"),
- httpClient
- );
-
- // create a new user+role that can only read 'auth_test' + STATE read
access
- List<ResourceAction> readDatasourceWithStatePermissions = ImmutableList.of(
- new ResourceAction(
- new Resource("auth_test", ResourceType.DATASOURCE),
- Action.READ
- ),
- new ResourceAction(
- new Resource(".*", ResourceType.STATE),
- Action.READ
- )
- );
- createUserAndRoleWithPermissions(
- adminClient,
- "datasourceWithStateUser",
- "helloworld",
- "datasourceWithStateRole",
- readDatasourceWithStatePermissions
- );
- HttpClient datasourceWithStateUserClient = new CredentialedHttpClient(
- new BasicCredentials("datasourceWithStateUser", "helloworld"),
- httpClient
- );
-
- // create a new user+role with only STATE read access
- List<ResourceAction> stateOnlyPermissions = ImmutableList.of(
- new ResourceAction(
- new Resource(".*", ResourceType.STATE),
- Action.READ
- )
- );
- createUserAndRoleWithPermissions(
- adminClient,
- "stateOnlyUser",
- "helloworld",
- "stateOnlyRole",
- stateOnlyPermissions
- );
- HttpClient stateOnlyUserClient = new CredentialedHttpClient(
- new BasicCredentials("stateOnlyUser", "helloworld"),
- httpClient
- );
-
- // check that we can access a datasource-permission restricted resource on
the broker
- HttpUtil.makeRequest(
- datasourceOnlyUserClient,
- HttpMethod.GET,
- config.getBrokerUrl() + "/druid/v2/datasources/auth_test",
- null
- );
-
- // check that we can access a state-permission restricted resource on the
broker
- HttpUtil.makeRequest(
- datasourceWithStateUserClient,
- HttpMethod.GET,
- config.getBrokerUrl() + "/status",
- null
- );
- HttpUtil.makeRequest(stateOnlyUserClient, HttpMethod.GET,
config.getBrokerUrl() + "/status", null);
-
- // initial setup is done now, run the system schema response content tests
- final List<Map<String, Object>> adminSegments = jsonMapper.readValue(
-
TestQueryHelper.class.getResourceAsStream(SYSTEM_SCHEMA_SEGMENTS_RESULTS_RESOURCE),
- SYS_SCHEMA_RESULTS_TYPE_REFERENCE
- );
-
- final List<Map<String, Object>> adminServerSegments = jsonMapper.readValue(
- fillSegementServersTemplate(
- config,
-
AbstractIndexerTest.getResourceAsString(SYSTEM_SCHEMA_SERVER_SEGMENTS_RESULTS_RESOURCE)
- ),
- SYS_SCHEMA_RESULTS_TYPE_REFERENCE
- );
-
- final List<Map<String, Object>> adminServers =
getServersWithoutCurrentSize(
- jsonMapper.readValue(
- fillServersTemplate(
- config,
-
AbstractIndexerTest.getResourceAsString(SYSTEM_SCHEMA_SERVERS_RESULTS_RESOURCE)
- ),
- SYS_SCHEMA_RESULTS_TYPE_REFERENCE
- )
- );
-
- final List<Map<String, Object>> adminTasks = jsonMapper.readValue(
-
TestQueryHelper.class.getResourceAsStream(SYSTEM_SCHEMA_TASKS_RESULTS_RESOURCE),
- SYS_SCHEMA_RESULTS_TYPE_REFERENCE
- );
-
// as admin
LOG.info("Checking sys.segments query as admin...");
verifySystemSchemaQuery(
@@ -268,6 +104,18 @@ public class ITBasicAuthConfigurationTest
SYS_SCHEMA_TASKS_QUERY,
adminTasks
);
+ }
+
+ @Test
+ public void test_systemSchemaAccess_datasourceOnlyUser() throws Exception
+ {
+ // check that we can access a datasource-permission restricted resource on
the broker
+ HttpUtil.makeRequest(
+ datasourceOnlyUserClient,
+ HttpMethod.GET,
+ config.getBrokerUrl() + "/druid/v2/datasources/auth_test",
+ null
+ );
// as user that can only read auth_test
LOG.info("Checking sys.segments query as datasourceOnlyUser...");
@@ -303,6 +151,18 @@ public class ITBasicAuthConfigurationTest
.filter((taskEntry) ->
"auth_test".equals(taskEntry.get("datasource")))
.collect(Collectors.toList())
);
+ }
+
+ @Test
+ public void test_systemSchemaAccess_datasourceWithStateUser() throws
Exception
+ {
+ // check that we can access a state-permission restricted resource on the
broker
+ HttpUtil.makeRequest(
+ datasourceWithStateUserClient,
+ HttpMethod.GET,
+ config.getBrokerUrl() + "/status",
+ null
+ );
// as user that can read auth_test and STATE
LOG.info("Checking sys.segments query as datasourceWithStateUser...");
@@ -339,6 +199,12 @@ public class ITBasicAuthConfigurationTest
.filter((taskEntry) ->
"auth_test".equals(taskEntry.get("datasource")))
.collect(Collectors.toList())
);
+ }
+
+ @Test
+ public void test_systemSchemaAccess_stateOnlyUser() throws Exception
+ {
+ HttpUtil.makeRequest(stateOnlyUserClient, HttpMethod.GET,
config.getBrokerUrl() + "/status", null);
// as user that can only read STATE
LOG.info("Checking sys.segments query as stateOnlyUser...");
@@ -371,34 +237,147 @@ public class ITBasicAuthConfigurationTest
}
@Test
- public void testAuthConfiguration() throws Exception
+ public void test_unsecuredPathWithoutCredentials_allowed()
{
- HttpClient adminClient = new CredentialedHttpClient(
- new BasicCredentials("admin", "priest"),
- httpClient
- );
+ // check that we are allowed to access unsecured path without credentials.
+ checkUnsecuredCoordinatorLoadQueuePath(httpClient);
+ }
- HttpClient internalSystemClient = new CredentialedHttpClient(
- new BasicCredentials("druid_system", "warlock"),
- httpClient
- );
+ @Test
+ public void test_admin_hasNodeAccess()
+ {
+ checkNodeAccess(adminClient);
+ }
- HttpClient newUserClient = new CredentialedHttpClient(
- new BasicCredentials("druid", "helloworld"),
- httpClient
- );
+ @Test
+ public void test_admin_loadStatus() throws Exception
+ {
+ checkLoadStatus(adminClient);
+ }
- final HttpClient unsecuredClient = httpClient;
+ @Test
+ public void test_internalSystemUser_hasNodeAccess()
+ {
+ checkNodeAccess(internalSystemClient);
+ }
- // check that we are allowed to access unsecured path without credentials.
- checkUnsecuredCoordinatorLoadQueuePath(unsecuredClient);
- // check that admin works
- checkNodeAccess(adminClient);
+ @Test
+ public void test_druid99User_hasNodeAccess()
+ {
+ checkNodeAccess(druid99);
+ }
- // check that internal user works
- checkNodeAccess(internalSystemClient);
+ @Test
+ public void test_avaticaQuery_broker()
+ {
+ testAvaticaQuery(getBrokerAvacticaUrl());
+ }
+
+ @Test
+ public void test_avaticaQuery_router()
+ {
+ testAvaticaQuery(getRouterAvacticaUrl());
+ }
+
+ @Test
+ public void test_avaticaQueryAuthFailure_broker() throws Exception
+ {
+ testAvaticaAuthFailure(getBrokerAvacticaUrl());
+ }
+
+ @Test
+ public void test_avaticaQueryAuthFailure_router() throws Exception
+ {
+ testAvaticaAuthFailure(getRouterAvacticaUrl());
+ }
+
+ @Test
+ public void test_admin_optionsRequest()
+ {
+ verifyAdminOptionsRequest();
+ }
+
+ @Test
+ public void test_authentication_invalidAuthName_fails()
+ {
+ verifyAuthenticatioInvalidAuthNameFails();
+ }
+
+ @Test
+ public void test_authorization_invalidAuthName_fails()
+ {
+ verifyAuthorizationInvalidAuthNameFails();
+ }
+
+ @Test
+ public void test_groupMappings_invalidAuthName_fails()
+ {
+ verifyGroupMappingsInvalidAuthNameFails();
+ }
+
+ @Test
+ public void testMaliciousUser()
+ {
+ verifyMaliciousUser();
+ }
+
+ @Override
+ void setupUsers() throws Exception
+ {
+ // create a new user+role that can only read 'auth_test'
+ List<ResourceAction> readDatasourceOnlyPermissions =
Collections.singletonList(
+ new ResourceAction(
+ new Resource("auth_test", ResourceType.DATASOURCE),
+ Action.READ
+ )
+ );
+ createUserAndRoleWithPermissions(
+ adminClient,
+ "datasourceOnlyUser",
+ "helloworld",
+ "datasourceOnlyRole",
+ readDatasourceOnlyPermissions
+ );
+
+ // create a new user+role that can only read 'auth_test' + STATE read
access
+ List<ResourceAction> readDatasourceWithStatePermissions = ImmutableList.of(
+ new ResourceAction(
+ new Resource("auth_test", ResourceType.DATASOURCE),
+ Action.READ
+ ),
+ new ResourceAction(
+ new Resource(".*", ResourceType.STATE),
+ Action.READ
+ )
+ );
+ createUserAndRoleWithPermissions(
+ adminClient,
+ "datasourceWithStateUser",
+ "helloworld",
+ "datasourceWithStateRole",
+ readDatasourceWithStatePermissions
+ );
+
+ // create a new user+role with only STATE read access
+ List<ResourceAction> stateOnlyPermissions = ImmutableList.of(
+ new ResourceAction(
+ new Resource(".*", ResourceType.STATE),
+ Action.READ
+ )
+ );
+ createUserAndRoleWithPermissions(
+ adminClient,
+ "stateOnlyUser",
+ "helloworld",
+ "stateOnlyRole",
+ stateOnlyPermissions
+ );
+ }
+ @Override
+ void setupTestSpecificHttpClients() throws Exception
+ {
// create a new user+role that can read /status
List<ResourceAction> permissions = Collections.singletonList(
new ResourceAction(
@@ -414,12 +393,6 @@ public class ITBasicAuthConfigurationTest
permissions
);
- // check that the new user works
- checkNodeAccess(newUserClient);
-
- // check loadStatus
- checkLoadStatus(adminClient);
-
// create 100 users
for (int i = 0; i < 100; i++) {
HttpUtil.makeRequest(
@@ -454,201 +427,10 @@ public class ITBasicAuthConfigurationTest
null
);
- HttpClient newUser99Client = new CredentialedHttpClient(
+ druid99 = new CredentialedHttpClient(
new BasicCredentials("druid99", "helloworld"),
httpClient
);
-
- LOG.info("Checking access for user druid99.");
- checkNodeAccess(newUser99Client);
-
- String brokerUrl = "jdbc:avatica:remote:url=" + config.getBrokerUrl() +
DruidAvaticaHandler.AVATICA_PATH;
- String routerUrl = "jdbc:avatica:remote:url=" + config.getRouterUrl() +
DruidAvaticaHandler.AVATICA_PATH;
-
- LOG.info("Checking Avatica query on broker.");
- testAvaticaQuery(brokerUrl);
-
- LOG.info("Checking Avatica query on router.");
- testAvaticaQuery(routerUrl);
-
- LOG.info("Testing Avatica query on broker with incorrect credentials.");
- testAvaticaAuthFailure(brokerUrl);
-
- LOG.info("Testing Avatica query on router with incorrect credentials.");
- testAvaticaAuthFailure(routerUrl);
-
- LOG.info("Checking OPTIONS requests on services...");
- testOptionsRequests(adminClient);
- }
-
- @Test
- public void testInvalidAuthNames()
- {
- String invalidName = "invalid%2Fname";
- HttpClient adminClient = new CredentialedHttpClient(
- new BasicCredentials("admin", "priest"),
- httpClient
- );
-
- HttpUtil.makeRequestWithExpectedStatus(
- adminClient,
- HttpMethod.POST,
- StringUtils.format(
- "%s/druid-ext/basic-security/authentication/listen/%s",
- config.getCoordinatorUrl(),
- invalidName
- ),
- "SERIALIZED_DATA".getBytes(StandardCharsets.UTF_8),
- HttpResponseStatus.INTERNAL_SERVER_ERROR
- );
-
- HttpUtil.makeRequestWithExpectedStatus(
- adminClient,
- HttpMethod.POST,
- StringUtils.format(
- "%s/druid-ext/basic-security/authorization/listen/users/%s",
- config.getCoordinatorUrl(),
- invalidName
- ),
- "SERIALIZED_DATA".getBytes(StandardCharsets.UTF_8),
- HttpResponseStatus.INTERNAL_SERVER_ERROR
- );
-
- HttpUtil.makeRequestWithExpectedStatus(
- adminClient,
- HttpMethod.POST,
- StringUtils.format(
-
"%s/druid-ext/basic-security/authorization/listen/groupMappings/%s",
- config.getCoordinatorUrl(),
- invalidName
- ),
- "SERIALIZED_DATA".getBytes(StandardCharsets.UTF_8),
- HttpResponseStatus.INTERNAL_SERVER_ERROR
- );
- }
-
- @Test
- public void testMaliciousUser()
- {
- String maliciousUsername = "<script>alert('hello')</script>";
- HttpClient maliciousClient = new CredentialedHttpClient(
- new BasicCredentials(maliciousUsername, "noPass"),
- httpClient
- );
- StatusResponseHolder responseHolder =
HttpUtil.makeRequestWithExpectedStatus(
- maliciousClient,
- HttpMethod.GET,
- config.getBrokerUrl() + "/status",
- null,
- HttpResponseStatus.UNAUTHORIZED
- );
- String responseContent = responseHolder.getContent();
-
Assert.assertTrue(responseContent.contains("<tr><th>MESSAGE:</th><td>Unauthorized</td></tr>"));
- Assert.assertFalse(responseContent.contains(maliciousUsername));
- }
-
- private void testOptionsRequests(HttpClient httpClient)
- {
- HttpUtil.makeRequest(httpClient, HttpMethod.OPTIONS,
config.getCoordinatorUrl() + "/status", null);
- HttpUtil.makeRequest(httpClient, HttpMethod.OPTIONS,
config.getOverlordUrl() + "/status", null);
- HttpUtil.makeRequest(httpClient, HttpMethod.OPTIONS, config.getBrokerUrl()
+ "/status", null);
- HttpUtil.makeRequest(httpClient, HttpMethod.OPTIONS,
config.getHistoricalUrl() + "/status", null);
- HttpUtil.makeRequest(httpClient, HttpMethod.OPTIONS, config.getRouterUrl()
+ "/status", null);
- }
-
- private void checkUnsecuredCoordinatorLoadQueuePath(HttpClient client)
- {
- HttpUtil.makeRequest(client, HttpMethod.GET, config.getCoordinatorUrl() +
"/druid/coordinator/v1/loadqueue", null);
- }
-
- private void testAvaticaQuery(String url)
- {
- LOG.info("URL: " + url);
- try {
- Properties connectionProperties = new Properties();
- connectionProperties.setProperty("user", "admin");
- connectionProperties.setProperty("password", "priest");
- Connection connection = DriverManager.getConnection(url,
connectionProperties);
- Statement statement = connection.createStatement();
- statement.setMaxRows(450);
- String query = "SELECT * FROM INFORMATION_SCHEMA.COLUMNS";
- ResultSet resultSet = statement.executeQuery(query);
- Assert.assertTrue(resultSet.next());
- statement.close();
- connection.close();
- }
- catch (Exception e) {
- throw new RuntimeException(e);
- }
- }
-
- private void testAvaticaAuthFailure(String url) throws Exception
- {
- LOG.info("URL: " + url);
- try {
- Properties connectionProperties = new Properties();
- connectionProperties.setProperty("user", "admin");
- connectionProperties.setProperty("password", "wrongpassword");
- Connection connection = DriverManager.getConnection(url,
connectionProperties);
- Statement statement = connection.createStatement();
- statement.setMaxRows(450);
- String query = "SELECT * FROM INFORMATION_SCHEMA.COLUMNS";
- statement.executeQuery(query);
- }
- catch (AvaticaSqlException ase) {
- Assert.assertEquals(
- ase.getErrorMessage(),
- "Error while executing SQL \"SELECT * FROM
INFORMATION_SCHEMA.COLUMNS\": Remote driver error:
BasicSecurityAuthenticationException: User metadata store authentication
failed."
- );
- return;
- }
- Assert.fail("Test failed, did not get AvaticaSqlException.");
- }
-
-
- private void checkNodeAccess(HttpClient httpClient)
- {
- HttpUtil.makeRequest(httpClient, HttpMethod.GET,
config.getCoordinatorUrl() + "/status", null);
- HttpUtil.makeRequest(httpClient, HttpMethod.GET, config.getOverlordUrl() +
"/status", null);
- HttpUtil.makeRequest(httpClient, HttpMethod.GET, config.getBrokerUrl() +
"/status", null);
- HttpUtil.makeRequest(httpClient, HttpMethod.GET, config.getHistoricalUrl()
+ "/status", null);
- HttpUtil.makeRequest(httpClient, HttpMethod.GET, config.getRouterUrl() +
"/status", null);
- }
-
- private void checkLoadStatus(HttpClient httpClient) throws Exception
- {
- checkLoadStatusSingle(httpClient, config.getCoordinatorUrl());
- checkLoadStatusSingle(httpClient, config.getOverlordUrl());
- checkLoadStatusSingle(httpClient, config.getBrokerUrl());
- checkLoadStatusSingle(httpClient, config.getHistoricalUrl());
- checkLoadStatusSingle(httpClient, config.getRouterUrl());
- }
-
- private void checkLoadStatusSingle(HttpClient httpClient, String baseUrl)
throws Exception
- {
- StatusResponseHolder holder = HttpUtil.makeRequest(
- httpClient,
- HttpMethod.GET,
- baseUrl + "/druid-ext/basic-security/authentication/loadStatus",
- null
- );
- String content = holder.getContent();
- Map<String, Boolean> loadStatus = jsonMapper.readValue(content,
JacksonUtils.TYPE_REFERENCE_MAP_STRING_BOOLEAN);
-
- Assert.assertNotNull(loadStatus.get("basic"));
- Assert.assertTrue(loadStatus.get("basic"));
-
- holder = HttpUtil.makeRequest(
- httpClient,
- HttpMethod.GET,
- baseUrl + "/druid-ext/basic-security/authorization/loadStatus",
- null
- );
- content = holder.getContent();
- loadStatus = jsonMapper.readValue(content,
JacksonUtils.TYPE_REFERENCE_MAP_STRING_BOOLEAN);
-
- Assert.assertNotNull(loadStatus.get("basic"));
- Assert.assertTrue(loadStatus.get("basic"));
}
private void createUserAndRoleWithPermissions(
@@ -723,97 +505,21 @@ public class ITBasicAuthConfigurationTest
);
}
- private StatusResponseHolder makeSQLQueryRequest(
- HttpClient httpClient,
- String query,
- HttpResponseStatus expectedStatus
- ) throws Exception
- {
- Map<String, Object> queryMap = ImmutableMap.of(
- "query", query
- );
- return HttpUtil.makeRequestWithExpectedStatus(
- httpClient,
- HttpMethod.POST,
- config.getBrokerUrl() + "/druid/v2/sql",
- jsonMapper.writeValueAsBytes(queryMap),
- expectedStatus
- );
- }
-
- private void verifySystemSchemaQueryBase(
- HttpClient client,
- String query,
- List<Map<String, Object>> expectedResults,
- boolean isServerQuery
- ) throws Exception
+ @Override
+ String getAuthenticatorName()
{
- StatusResponseHolder responseHolder = makeSQLQueryRequest(client, query,
HttpResponseStatus.OK);
- String content = responseHolder.getContent();
- List<Map<String, Object>> responseMap = jsonMapper.readValue(content,
SYS_SCHEMA_RESULTS_TYPE_REFERENCE);
- if (isServerQuery) {
- responseMap = getServersWithoutCurrentSize(responseMap);
- }
- Assert.assertEquals(responseMap, expectedResults);
- }
-
- private void verifySystemSchemaQuery(
- HttpClient client,
- String query,
- List<Map<String, Object>> expectedResults
- ) throws Exception
- {
- verifySystemSchemaQueryBase(client, query, expectedResults, false);
- }
-
- private void verifySystemSchemaServerQuery(
- HttpClient client,
- String query,
- List<Map<String, Object>> expectedResults
- ) throws Exception
- {
- verifySystemSchemaQueryBase(client, query, expectedResults, true);
- }
-
- private void verifySystemSchemaQueryFailure(
- HttpClient client,
- String query,
- HttpResponseStatus expectedErrorStatus,
- String expectedErrorMessage
- ) throws Exception
- {
- StatusResponseHolder responseHolder = makeSQLQueryRequest(client, query,
expectedErrorStatus);
- Assert.assertEquals(responseHolder.getStatus(), expectedErrorStatus);
- Assert.assertEquals(responseHolder.getContent(), expectedErrorMessage);
- }
-
- /**
- * curr_size on historicals changes because cluster state is not isolated
across different
- * integration tests, zero it out for consistent test results
- */
- private static List<Map<String, Object>>
getServersWithoutCurrentSize(List<Map<String, Object>> servers)
- {
- return Lists.transform(
- servers,
- (server) -> {
- Map<String, Object> newServer = new HashMap<>(server);
- newServer.put("curr_size", 0);
- return newServer;
- }
- );
+ return BASIC_AUTHENTICATOR;
}
- private static String fillSegementServersTemplate(IntegrationTestingConfig
config, String template)
+ @Override
+ String getAuthorizerName()
{
- String json = StringUtils.replace(template, "%%HISTORICAL%%",
config.getHistoricalInternalHost());
- return json;
+ return BASIC_AUTHORIZER;
}
- private static String fillServersTemplate(IntegrationTestingConfig config,
String template)
+ @Override
+ String getExpectedAvaticaAuthError()
{
- String json = StringUtils.replace(template, "%%HISTORICAL%%",
config.getHistoricalInternalHost());
- json = StringUtils.replace(json, "%%BROKER%%",
config.getBrokerInternalHost());
- json = StringUtils.replace(json, "%%NON_LEADER%%",
String.valueOf(NullHandling.defaultLongValue()));
- return json;
+ return EXPECTED_AVATICA_AUTH_ERROR;
}
}
diff --git
a/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthLdapConfigurationTest.java
b/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthLdapConfigurationTest.java
new file mode 100644
index 0000000..219e664
--- /dev/null
+++
b/integration-tests/src/test/java/org/apache/druid/tests/security/ITBasicAuthLdapConfigurationTest.java
@@ -0,0 +1,541 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.tests.security;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import com.google.inject.Inject;
+import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.java.util.common.logger.Logger;
+import org.apache.druid.java.util.http.client.CredentialedHttpClient;
+import org.apache.druid.java.util.http.client.HttpClient;
+import org.apache.druid.java.util.http.client.auth.BasicCredentials;
+import
org.apache.druid.security.basic.authorization.entity.BasicAuthorizerGroupMapping;
+import org.apache.druid.server.security.Action;
+import org.apache.druid.server.security.Resource;
+import org.apache.druid.server.security.ResourceAction;
+import org.apache.druid.server.security.ResourceType;
+import org.apache.druid.testing.IntegrationTestingConfig;
+import org.apache.druid.testing.clients.CoordinatorResourceTestClient;
+import org.apache.druid.testing.guice.DruidTestModuleFactory;
+import org.apache.druid.testing.utils.HttpUtil;
+import org.apache.druid.testing.utils.ITRetryUtil;
+import org.apache.druid.tests.TestNGGroup;
+import org.jboss.netty.handler.codec.http.HttpMethod;
+import org.jboss.netty.handler.codec.http.HttpResponseStatus;
+import org.testng.annotations.BeforeClass;
+import org.testng.annotations.Guice;
+import org.testng.annotations.Test;
+
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+import java.util.stream.Collectors;
+
+@Test(groups = TestNGGroup.LDAP_SECURITY)
+@Guice(moduleFactory = DruidTestModuleFactory.class)
+public class ITBasicAuthLdapConfigurationTest extends
AbstractAuthConfigurationTest
+{
+ private static final Logger LOG = new
Logger(ITBasicAuthLdapConfigurationTest.class);
+
+ private static final String LDAP_AUTHENTICATOR = "ldap";
+ private static final String LDAP_AUTHORIZER = "ldapauth";
+
+ private static final String EXPECTED_AVATICA_AUTH_ERROR = "Error while
executing SQL \"SELECT * FROM INFORMATION_SCHEMA.COLUMNS\": Remote driver
error: BasicSecurityAuthenticationException: User LDAP authentication failed.";
+
+ @Inject
+ IntegrationTestingConfig config;
+
+ @Inject
+ ObjectMapper jsonMapper;
+
+ @Inject
+ private CoordinatorResourceTestClient coordinatorClient;
+
+ private HttpClient druidUserClient;
+ private HttpClient stateOnlyNoLdapGroupUserClient;
+
  /**
   * One-time class setup: waits for the 'auth_test' datasource segments to be fully
   * loaded (the system-schema tests query them), then builds the HTTP clients,
   * creates LDAP roles/group mappings, and captures the expected sys-schema results.
   */
  @BeforeClass
  public void before() throws Exception
  {
    // ensure that auth_test segments are loaded completely, we use them for testing system schema tables
    ITRetryUtil.retryUntilTrue(
        () -> coordinatorClient.areSegmentsLoaded("auth_test"), "auth_test segment load"
    );

    setupHttpClients();
    setupUsers();
    setExpectedSystemSchemaObjects();
  }
+
  /**
   * Verifies that the admin user can reach every node and sees unfiltered results
   * from all four sys-schema tables (segments, servers, server_segments, tasks).
   */
  @Test
  public void test_systemSchemaAccess_admin() throws Exception
  {
    // check that admin access works on all nodes
    checkNodeAccess(adminClient);

    // as admin
    LOG.info("Checking sys.segments query as admin...");
    verifySystemSchemaQuery(
        adminClient,
        SYS_SCHEMA_SEGMENTS_QUERY,
        adminSegments
    );

    LOG.info("Checking sys.servers query as admin...");
    // servers query uses the dedicated helper so curr_size is zeroed for stability
    verifySystemSchemaServerQuery(
        adminClient,
        SYS_SCHEMA_SERVERS_QUERY,
        getServersWithoutCurrentSize(adminServers)
    );

    LOG.info("Checking sys.server_segments query as admin...");
    verifySystemSchemaQuery(
        adminClient,
        SYS_SCHEMA_SERVER_SEGMENTS_QUERY,
        adminServerSegments
    );

    LOG.info("Checking sys.tasks query as admin...");
    verifySystemSchemaQuery(
        adminClient,
        SYS_SCHEMA_TASKS_QUERY,
        adminTasks
    );
  }
+
  /**
   * Verifies sys-schema visibility for a user with READ on 'auth_test' only:
   * segments and tasks are filtered to that datasource, while server-level
   * tables (servers, server_segments) are forbidden outright.
   */
  @Test
  public void test_systemSchemaAccess_datasourceOnlyUser() throws Exception
  {
    // check that we can access a datasource-permission restricted resource on the broker
    HttpUtil.makeRequest(
        datasourceOnlyUserClient,
        HttpMethod.GET,
        config.getBrokerUrl() + "/druid/v2/datasources/auth_test",
        null
    );

    // as user that can only read auth_test
    LOG.info("Checking sys.segments query as datasourceOnlyUser...");
    verifySystemSchemaQuery(
        datasourceOnlyUserClient,
        SYS_SCHEMA_SEGMENTS_QUERY,
        adminSegments.stream()
                     .filter((segmentEntry) -> "auth_test".equals(segmentEntry.get("datasource")))
                     .collect(Collectors.toList())
    );

    LOG.info("Checking sys.servers query as datasourceOnlyUser...");
    // server-level tables require STATE read permission, which this user lacks
    verifySystemSchemaQueryFailure(
        datasourceOnlyUserClient,
        SYS_SCHEMA_SERVERS_QUERY,
        HttpResponseStatus.FORBIDDEN,
        "{\"Access-Check-Result\":\"Insufficient permission to view servers : Allowed:false, Message:\"}"
    );

    LOG.info("Checking sys.server_segments query as datasourceOnlyUser...");
    verifySystemSchemaQueryFailure(
        datasourceOnlyUserClient,
        SYS_SCHEMA_SERVER_SEGMENTS_QUERY,
        HttpResponseStatus.FORBIDDEN,
        "{\"Access-Check-Result\":\"Insufficient permission to view servers : Allowed:false, Message:\"}"
    );

    LOG.info("Checking sys.tasks query as datasourceOnlyUser...");
    verifySystemSchemaQuery(
        datasourceOnlyUserClient,
        SYS_SCHEMA_TASKS_QUERY,
        adminTasks.stream()
                  .filter((taskEntry) -> "auth_test".equals(taskEntry.get("datasource")))
                  .collect(Collectors.toList())
    );
  }
+
  /**
   * Verifies sys-schema visibility for a user with READ on 'auth_test' plus STATE
   * read access: segments/server_segments/tasks are filtered to 'auth_test', and
   * the servers table is fully visible thanks to the STATE permission.
   */
  @Test
  public void test_systemSchemaAccess_datasourceWithStateUser() throws Exception
  {
    // check that we can access a state-permission restricted resource on the broker
    HttpUtil.makeRequest(
        datasourceWithStateUserClient,
        HttpMethod.GET,
        config.getBrokerUrl() + "/status",
        null
    );

    // as user that can read auth_test and STATE
    LOG.info("Checking sys.segments query as datasourceWithStateUser...");
    verifySystemSchemaQuery(
        datasourceWithStateUserClient,
        SYS_SCHEMA_SEGMENTS_QUERY,
        adminSegments.stream()
                     .filter((segmentEntry) -> "auth_test".equals(segmentEntry.get("datasource")))
                     .collect(Collectors.toList())
    );

    LOG.info("Checking sys.servers query as datasourceWithStateUser...");
    verifySystemSchemaServerQuery(
        datasourceWithStateUserClient,
        SYS_SCHEMA_SERVERS_QUERY,
        adminServers
    );

    LOG.info("Checking sys.server_segments query as datasourceWithStateUser...");
    // filtered on segment_id because server_segments rows have no datasource column
    verifySystemSchemaQuery(
        datasourceWithStateUserClient,
        SYS_SCHEMA_SERVER_SEGMENTS_QUERY,
        adminServerSegments.stream()
                           .filter((serverSegmentEntry) -> ((String) serverSegmentEntry.get("segment_id")).contains(
                               "auth_test"))
                           .collect(Collectors.toList())
    );

    LOG.info("Checking sys.tasks query as datasourceWithStateUser...");
    verifySystemSchemaQuery(
        datasourceWithStateUserClient,
        SYS_SCHEMA_TASKS_QUERY,
        adminTasks.stream()
                  .filter((taskEntry) -> "auth_test".equals(taskEntry.get("datasource")))
                  .collect(Collectors.toList())
    );
  }
+
  /**
   * Verifies sys-schema visibility for a user with only STATE read access:
   * the servers table is fully visible, while segments, server_segments and
   * tasks all return empty result sets (no datasource permissions).
   */
  @Test
  public void test_systemSchemaAccess_stateOnlyUser() throws Exception
  {
    // STATE read access is sufficient to hit /status on the broker
    HttpUtil.makeRequest(stateOnlyUserClient, HttpMethod.GET, config.getBrokerUrl() + "/status", null);

    // as user that can only read STATE
    LOG.info("Checking sys.segments query as stateOnlyUser...");
    verifySystemSchemaQuery(
        stateOnlyUserClient,
        SYS_SCHEMA_SEGMENTS_QUERY,
        Collections.emptyList()
    );

    LOG.info("Checking sys.servers query as stateOnlyUser...");
    verifySystemSchemaServerQuery(
        stateOnlyUserClient,
        SYS_SCHEMA_SERVERS_QUERY,
        adminServers
    );

    LOG.info("Checking sys.server_segments query as stateOnlyUser...");
    verifySystemSchemaQuery(
        stateOnlyUserClient,
        SYS_SCHEMA_SERVER_SEGMENTS_QUERY,
        Collections.emptyList()
    );

    LOG.info("Checking sys.tasks query as stateOnlyUser...");
    verifySystemSchemaQuery(
        stateOnlyUserClient,
        SYS_SCHEMA_TASKS_QUERY,
        Collections.emptyList()
    );
  }
+
+ @Test
+ public void test_systemSchemaAccess_stateOnlyNoLdapGroupUser() throws
Exception
+ {
+ HttpUtil.makeRequest(stateOnlyUserClient, HttpMethod.GET,
config.getBrokerUrl() + "/status", null);
+
+ // as user that can only read STATE
+ LOG.info("Checking sys.segments query as stateOnlyNoLdapGroupUser...");
+ verifySystemSchemaQuery(
+ stateOnlyNoLdapGroupUserClient,
+ SYS_SCHEMA_SEGMENTS_QUERY,
+ Collections.emptyList()
+ );
+
+ LOG.info("Checking sys.servers query as stateOnlyNoLdapGroupUser...");
+ verifySystemSchemaServerQuery(
+ stateOnlyNoLdapGroupUserClient,
+ SYS_SCHEMA_SERVERS_QUERY,
+ adminServers
+ );
+
+ LOG.info("Checking sys.server_segments query as
stateOnlyNoLdapGroupUser...");
+ verifySystemSchemaQuery(
+ stateOnlyNoLdapGroupUserClient,
+ SYS_SCHEMA_SERVER_SEGMENTS_QUERY,
+ Collections.emptyList()
+ );
+
+ LOG.info("Checking sys.tasks query as stateOnlyNoLdapGroupUser...");
+ verifySystemSchemaQuery(
+ stateOnlyNoLdapGroupUserClient,
+ SYS_SCHEMA_TASKS_QUERY,
+ Collections.emptyList()
+ );
+ }
+
  @Test
  public void test_unsecuredPathWithoutCredentials_allowed()
  {
    // check that we are allowed to access unsecured path without credentials.
    checkUnsecuredCoordinatorLoadQueuePath(httpClient);
  }

  @Test
  public void test_admin_loadStatus() throws Exception
  {
    // basic-security loadStatus must report "basic":true on every node type
    checkLoadStatus(adminClient);
  }

  @Test
  public void test_admin_hasNodeAccess()
  {
    checkNodeAccess(adminClient);
  }

  @Test
  public void test_internalSystemUser_hasNodeAccess()
  {
    checkNodeAccess(internalSystemClient);
  }

  @Test
  public void test_druidUser_hasNodeAccess()
  {
    // 'druid' user gets STATE read via the druidGroup LDAP group mapping
    checkNodeAccess(druidUserClient);
  }

  @Test
  public void test_avaticaQuery_broker()
  {
    // NOTE(review): "Avactica" is a typo for "Avatica" in the base-class accessor;
    // renaming requires a coordinated change in AbstractAuthConfigurationTest.
    testAvaticaQuery(getBrokerAvacticaUrl());
  }

  @Test
  public void test_avaticaQuery_router()
  {
    testAvaticaQuery(getRouterAvacticaUrl());
  }

  @Test
  public void test_avaticaQueryAuthFailure_broker() throws Exception
  {
    testAvaticaAuthFailure(getBrokerAvacticaUrl());
  }

  @Test
  public void test_avaticaQueryAuthFailure_router() throws Exception
  {
    testAvaticaAuthFailure(getRouterAvacticaUrl());
  }

  @Test
  public void test_admin_optionsRequest()
  {
    // OPTIONS requests against /status on all services must succeed for admin
    verifyAdminOptionsRequest();
  }

  @Test
  public void test_authentication_invalidAuthName_fails()
  {
    // NOTE(review): "Authenticatio" is missing an 'n' — typo in the base-class
    // helper name; fix there and here together.
    verifyAuthenticatioInvalidAuthNameFails();
  }

  @Test
  public void test_authorization_invalidAuthName_fails()
  {
    verifyAuthorizationInvalidAuthNameFails();
  }

  @Test
  public void test_groupMappings_invalidAuthName_fails()
  {
    verifyGroupMappingsInvalidAuthNameFails();
  }

  @Test
  public void testMaliciousUser()
  {
    // an XSS-style username must not be reflected in the auth-failure response
    verifyMaliciousUser();
  }
+
  /**
   * Creates the LDAP-side authorization objects: one role per permission footprint,
   * each tied to an LDAP group via a group mapping (groups are presumably defined in
   * the docker ldap-configs/bootstrap.ldif — confirm against that file). Also assigns
   * the stateOnly role directly to a user that belongs to no LDAP group, to exercise
   * the user-to-role (non-group) path.
   */
  @Override
  void setupUsers() throws Exception
  {
    // create a role that can only read 'auth_test'
    List<ResourceAction> readDatasourceOnlyPermissions = Collections.singletonList(
        new ResourceAction(
            new Resource("auth_test", ResourceType.DATASOURCE),
            Action.READ
        )
    );

    createRoleWithPermissionsAndGroupMapping(
        "datasourceOnlyGroup",
        ImmutableMap.of("datasourceOnlyRole", readDatasourceOnlyPermissions)
    );

    // create a new role that can only read 'auth_test' + STATE read access
    List<ResourceAction> readDatasourceWithStatePermissions = ImmutableList.of(
        new ResourceAction(
            new Resource("auth_test", ResourceType.DATASOURCE),
            Action.READ
        ),
        new ResourceAction(
            new Resource(".*", ResourceType.STATE),
            Action.READ
        )
    );

    createRoleWithPermissionsAndGroupMapping(
        "datasourceWithStateGroup",
        ImmutableMap.of("datasourceWithStateRole", readDatasourceWithStatePermissions)
    );

    // create a new role with only STATE read access
    List<ResourceAction> stateOnlyPermissions = ImmutableList.of(
        new ResourceAction(
            new Resource(".*", ResourceType.STATE),
            Action.READ
        )
    );

    createRoleWithPermissionsAndGroupMapping(
        "stateOnlyGroup",
        ImmutableMap.of("stateOnlyRole", stateOnlyPermissions)
    );

    // create a role that can read /status
    createRoleWithPermissionsAndGroupMapping(
        "druidGroup",
        ImmutableMap.of("druidrole", stateOnlyPermissions)
    );

    // direct user-to-role assignment for a user with no LDAP group membership
    assignUserToRole("stateOnlyNoLdapGroup", "stateOnlyRole");
  }
+
+ @Override
+ void setupTestSpecificHttpClients()
+ {
+ druidUserClient = new CredentialedHttpClient(
+ new BasicCredentials("druid", "helloworld"),
+ httpClient
+ );
+
+ stateOnlyNoLdapGroupUserClient = new CredentialedHttpClient(
+ new BasicCredentials("stateOnlyNoLdapGroup", "helloworld"),
+ httpClient
+ );
+ }
+
+ private void createRoleWithPermissionsAndGroupMapping(
+ String group,
+ Map<String, List<ResourceAction>> roleTopermissions
+ ) throws Exception
+ {
+ roleTopermissions.keySet().forEach(role -> HttpUtil.makeRequest(
+ adminClient,
+ HttpMethod.POST,
+ StringUtils.format(
+ "%s/druid-ext/basic-security/authorization/db/ldapauth/roles/%s",
+ config.getCoordinatorUrl(),
+ role
+ ),
+ null
+ ));
+
+ for (Map.Entry<String, List<ResourceAction>> entry :
roleTopermissions.entrySet()) {
+ String role = entry.getKey();
+ List<ResourceAction> permissions = entry.getValue();
+ byte[] permissionsBytes = jsonMapper.writeValueAsBytes(permissions);
+ HttpUtil.makeRequest(
+ adminClient,
+ HttpMethod.POST,
+ StringUtils.format(
+
"%s/druid-ext/basic-security/authorization/db/ldapauth/roles/%s/permissions",
+ config.getCoordinatorUrl(),
+ role
+ ),
+ permissionsBytes
+ );
+ }
+
+ String groupMappingName = StringUtils.format("%sMapping", group);
+ BasicAuthorizerGroupMapping groupMapping = new BasicAuthorizerGroupMapping(
+ groupMappingName,
+ StringUtils.format("cn=%s,ou=Groups,dc=example,dc=org", group),
+ roleTopermissions.keySet()
+ );
+ byte[] groupMappingBytes = jsonMapper.writeValueAsBytes(groupMapping);
+ HttpUtil.makeRequest(
+ adminClient,
+ HttpMethod.POST,
+ StringUtils.format(
+
"%s/druid-ext/basic-security/authorization/db/ldapauth/groupMappings/%s",
+ config.getCoordinatorUrl(),
+ groupMappingName
+ ),
+ groupMappingBytes
+ );
+ }
+
+ private void assignUserToRole(
+ String user,
+ String role
+ )
+ {
+ HttpUtil.makeRequest(
+ adminClient,
+ HttpMethod.POST,
+ StringUtils.format(
+ "%s/druid-ext/basic-security/authorization/db/ldapauth/users/%s",
+ config.getCoordinatorUrl(),
+ user
+ ),
+ null
+ );
+
+ HttpUtil.makeRequest(
+ adminClient,
+ HttpMethod.POST,
+ StringUtils.format(
+
"%s/druid-ext/basic-security/authorization/db/ldapauth/users/%s/roles/%s",
+ config.getCoordinatorUrl(),
+ user,
+ role
+ ),
+ null
+ );
+ }
+
  @Override
  String getAuthenticatorName()
  {
    // "ldap" — presumably matches the authenticator configured in the
    // common-ldap docker environment config; confirm against that file.
    return LDAP_AUTHENTICATOR;
  }

  @Override
  String getAuthorizerName()
  {
    // "ldapauth" — the authorizer name used in all the API paths above.
    return LDAP_AUTHORIZER;
  }

  @Override
  String getExpectedAvaticaAuthError()
  {
    // LDAP-specific failure message ("User LDAP authentication failed.")
    // expected from Avatica when credentials are wrong.
    return EXPECTED_AVATICA_AUTH_ERROR;
  }
+}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]