This is an automated email from the ASF dual-hosted git repository.

abhishek pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/druid.git


The following commit(s) were added to refs/heads/master by this push:
     new 4706a4c572 Docker build for the revised ITs (#12707)
4706a4c572 is described below

commit 4706a4c5725211cd1967403c65454c42bfb12343
Author: Paul Rogers <[email protected]>
AuthorDate: Wed Aug 10 01:47:33 2022 -0700

    Docker build for the revised ITs (#12707)
    
    * Docker build for the revised ITs
    
    * Fix POM versions
    
    * Update comments from review suggestions
---
 .travis.yml                                        |  21 +-
 integration-tests-ex/.gitignore                    |   1 +
 integration-tests-ex/README.md                     |  35 +++
 integration-tests-ex/assets/log4j2.xml             |  32 +++
 integration-tests-ex/check-results.sh              |  89 ++++++++
 integration-tests-ex/it-image/build-image.sh       |  49 +++++
 integration-tests-ex/it-image/docker-build.sh      |  56 +++++
 integration-tests-ex/it-image/docker/Dockerfile    |  73 +++++++
 integration-tests-ex/it-image/docker/druid.sh      |  93 ++++++++
 integration-tests-ex/it-image/docker/launch.sh     | 104 +++++++++
 integration-tests-ex/it-image/docker/test-setup.sh |  79 +++++++
 integration-tests-ex/it-image/pom.xml              | 231 ++++++++++++++++++++
 integration-tests-ex/it-image/rebuild.sh           |  48 +++++
 .../it-image/scripts/build-shared.sh               |  40 ++++
 integration-tests-ex/it-tools/README.md            |  48 +++++
 integration-tests-ex/it-tools/pom.xml              | 155 ++++++++++++++
 .../druid/testing/tools/CliCustomNodeRole.java     | 176 +++++++++++++++
 .../tools/CliHistoricalForQueryErrorTest.java      |  57 +++++
 .../testing/tools/CustomNodeRoleClientModule.java  |  48 +++++
 .../tools/CustomNodeRoleCommandCreator.java        |  33 +++
 .../tools/QueryRetryTestCommandCreator.java        |  33 +++
 .../tools/ServerManagerForQueryErrorTest.java      | 238 +++++++++++++++++++++
 .../apache/druid/testing/tools/SleepExprMacro.java | 110 ++++++++++
 .../apache/druid/testing/tools/SleepModule.java    |  45 ++++
 .../testing/tools/SleepOperatorConversion.java     |  62 ++++++
 .../org/apache/druid/testing/tools/package.java    |  25 +++
 .../org.apache.druid.cli.CliCommandCreator         |  17 ++
 .../org.apache.druid.initialization.DruidModule    |  17 ++
 pom.xml                                            |   4 +
 29 files changed, 2016 insertions(+), 3 deletions(-)

diff --git a/.travis.yml b/.travis.yml
index aed248afdf..9ca6fee1ad 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -46,7 +46,7 @@ addons:
 # Add various options to make 'mvn install' fast and skip javascript compile 
(-Ddruid.console.skip=true) since it is not
 # needed. Depending on network speeds, "mvn -q install" may take longer than 
the default 10 minute timeout to print any
 # output.  To compensate, use travis_wait to extend the timeout.
-install: ./check_test_suite.py && travis_terminate 0  || echo 'Running Maven 
install...' && MAVEN_OPTS='-Xmx3000m' travis_wait 15 ${MVN} clean install -q 
-ff -pl '!distribution' ${MAVEN_SKIP} ${MAVEN_SKIP_TESTS} -T1C && ${MVN} 
install -q -ff -pl 'distribution' ${MAVEN_SKIP} ${MAVEN_SKIP_TESTS}
+install: ./check_test_suite.py && travis_terminate 0  || echo 'Running Maven 
install...' && MAVEN_OPTS='-Xmx3000m' travis_wait 15 ${MVN} clean install -q 
-ff -pl '!distribution,!:it-tools,!:it-image' ${MAVEN_SKIP} ${MAVEN_SKIP_TESTS} 
-T1C && ${MVN} install -q -ff -pl 'distribution' ${MAVEN_SKIP} 
${MAVEN_SKIP_TESTS}
 
 # There are 3 stages of tests
 # 1. Tests - phase 1
@@ -72,7 +72,7 @@ jobs:
     - name: "animal sniffer checks"
       stage: Tests - phase 1
       script: ${MVN} animal-sniffer:check --fail-at-end
-
+      
     - name: "checkstyle"
       script: ${MVN} checkstyle:checkstyle --fail-at-end
 
@@ -347,7 +347,7 @@ jobs:
       <<: *test_processing_module
       name: "(openjdk8) other modules test"
       env:
-        - 
MAVEN_PROJECTS='!processing,!indexing-hadoop,!indexing-service,!extensions-core/kafka-indexing-service,!extensions-core/kinesis-indexing-service,!server,!web-console,!integration-tests'
+        - 
MAVEN_PROJECTS='!processing,!indexing-hadoop,!indexing-service,!extensions-core/kafka-indexing-service,!extensions-core/kinesis-indexing-service,!server,!web-console,!integration-tests,!:it-image,!:it-tools'
 
     - <<: *test_other_modules
       name: "(openjdk11) other modules test"
@@ -755,6 +755,21 @@ jobs:
       env: TESTNG_GROUPS='-Dgroups=query' JVM_RUNTIME='-Djvm.runtime=11' 
USE_INDEXER='middleManager' MYSQL_DRIVER_CLASSNAME='org.mariadb.jdbc.Driver' 
OVERRIDE_CONFIG_PATH='./environment-configs/test-groups/prepopulated-data'
 
     # END - Integration tests for Compile with Java 8 and Run with Java 11
+    
+    # BEGIN - Revised integration tests
+
+    # Experimental build of the revised integration test Docker image.
+    # Actual tests will come later.
+    - name: "experimental docker tests"
+      stage: Tests - phase 2
+      # Uses the install defined above. Then, builds the test tools and docker 
image,
+      # and run the various IT tests. If tests fail, echoes log lines of any of
+      # the Druid services that did not exit normally.
+      # Run through install to ensure the test tools are installed, and the 
docker
+      # image is built. The tests only need verify.
+      script: ${MVN} install -P dist,test-image -rf :distribution 
${MAVEN_SKIP} -DskipUTs=true
+    
+    # END - Revised integration tests
 
     - &integration_batch_index_k8s
       name: "(Compile=openjdk8, Run=openjdk8, Cluster Build On K8s) 
ITNestedQueryPushDownTest integration test"
diff --git a/integration-tests-ex/.gitignore b/integration-tests-ex/.gitignore
new file mode 100644
index 0000000000..ae3c172604
--- /dev/null
+++ b/integration-tests-ex/.gitignore
@@ -0,0 +1 @@
+/bin/
diff --git a/integration-tests-ex/README.md b/integration-tests-ex/README.md
new file mode 100644
index 0000000000..9619897bae
--- /dev/null
+++ b/integration-tests-ex/README.md
@@ -0,0 +1,35 @@
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+
+# Revised Integration Tests
+
+This directory builds a Docker image for Druid. Later revisions
+use the image to run revised integration tests.
+
+The `it-base` project is built as part of the normal build,
+though it is used only for the Docker image.
+
+To build the image:
+
+```bash
+mvn $USUAL_CAVEATS -P test-image
+```
+
+Where `$USUAL_CAVEATS` are your favorite options to turn
+off static checks, UTs, etc.
\ No newline at end of file
diff --git a/integration-tests-ex/assets/log4j2.xml 
b/integration-tests-ex/assets/log4j2.xml
new file mode 100644
index 0000000000..dbce142e7f
--- /dev/null
+++ b/integration-tests-ex/assets/log4j2.xml
@@ -0,0 +1,32 @@
+<?xml version="1.0" encoding="UTF-8" ?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+
+<Configuration status="WARN">
+  <Appenders>
+    <Console name="Console" target="SYSTEM_OUT">
+      <PatternLayout pattern="%d{ISO8601} %p [%t] %c - %m%n"/>
+    </Console>
+  </Appenders>
+  <Loggers>
+    <Root level="info">
+      <AppenderRef ref="Console"/>
+    </Root>
+  </Loggers>
+</Configuration>
diff --git a/integration-tests-ex/check-results.sh 
b/integration-tests-ex/check-results.sh
new file mode 100755
index 0000000000..44aac0aa9f
--- /dev/null
+++ b/integration-tests-ex/check-results.sh
@@ -0,0 +1,89 @@
+#! /bin/bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#--------------------------------------------------------------------
+
+# Run from Travis which has no good way to attach logs to a
+# build. Instead, we check if any IT failed. If so, we append
+# the last 100 lines of each server log to stdout. We have to
+# stay within the 4MB limit which Travis applies, so we only
+# emit logs for the first failure, and only for servers that
+# don't report normal completion.
+#
+# The only good way to check for test failures is to parse
+# the Failsafe summary for each test located in
+# <project>/target/failsafe-reports/failsafe-summary.xml
+#
+# This directory has many subdirectories, some of which are
+# tests. We rely on the fact that a test starts with "it-" AND
+# contains a failsafe report. (Some projects start with "it-"
+# but are not tests.)
+
+# Run in the docker-tests directory
+cd $(dirname $0)
+
+# Scan for candidate projects
+for PROJECT in it-*
+do
+       # Check if a failsafe report exists. It will exist if the directory is
+       # a test project and failsafe ran on that directory.
+       REPORTS="$PROJECT/target/failsafe-reports/failsafe-summary.xml"
+       if [ -f "$REPORTS" ]
+       then
+               # OK, so Bash isn't the world's best text processing language...
+               ERRS=1
+               FAILS=1
+               while IFS= read -r line
+           do
+               if [ "$line" = "    <errors>0</errors>" ]
+               then
+                       ERRS=0
+               fi
+               if [ "$line" = "    <failures>0</failures>" ]
+               then
+                       FAILS=0
+               fi
+               done < "$REPORTS"
+               if [ $ERRS -eq 1 -o $FAILS -eq 1 ]
+           then
+               FOUND_LOGS=0
+               echo "======= $PROJECT Failed =========="
+               # All logs except zookeeper
+               for log in $(ls $PROJECT/target/shared/logs/[a-y]*.log)
+               do
+                       # We assume that a successful exit includes a line with 
the
+                       # following:
+                       # Stopping lifecycle [module] stage [INIT]
+                       tail -5 "$log" | grep -Fq 'Stopping lifecycle [module] 
stage [INIT]'
+                       if [ $? -ne 0 ]
+                       then
+                               # Assume failure and report tail
+                          echo $(basename $log) "logtail 
========================"
+                          tail -100 "$log"
+                          FOUND_LOGS=1
+                       fi
+               done
+
+               # Only emit the first failure to avoid output bloat
+               if [ $FOUND_LOGS -eq 1 ]
+               then
+                       exit 0
+               else
+                       echo "All Druid services exited normally."
+               fi
+           fi
+       fi
+done
diff --git a/integration-tests-ex/it-image/build-image.sh 
b/integration-tests-ex/it-image/build-image.sh
new file mode 100755
index 0000000000..0b37f24777
--- /dev/null
+++ b/integration-tests-ex/it-image/build-image.sh
@@ -0,0 +1,49 @@
+#! /usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#--------------------------------------------------------------------
+
+# Prepare the image contents and build the Druid image.
+# Since Docker requires all contents to be in or below the
+# working directory, we assemble the contents in target/docker.
+
+# Fail fast on any error
+set -e
+
+# Enable for tracing
+#set -x
+
+# Fail on unset environment variables
+set -u
+
+SCRIPT_DIR=$(cd $(dirname $0) && pwd)
+
+# Copy environment variables to a file. Used for manual rebuilds
+# and by scripts that start the test cluster.
+
+cat > $TARGET_DIR/env.sh << EOF
+export ZK_VERSION=$ZK_VERSION
+export KAFKA_VERSION=$KAFKA_VERSION
+export DRUID_VERSION=$DRUID_VERSION
+export MYSQL_VERSION=$MYSQL_VERSION
+export MYSQL_IMAGE_VERSION=$MYSQL_IMAGE_VERSION
+export CONFLUENT_VERSION=$CONFLUENT_VERSION
+export MARIADB_VERSION=$MARIADB_VERSION
+export HADOOP_VERSION=$HADOOP_VERSION
+export MYSQL_DRIVER_CLASSNAME=$MYSQL_DRIVER_CLASSNAME
+export DRUID_IT_IMAGE_NAME=$DRUID_IT_IMAGE_NAME
+EOF
+
+exec bash $SCRIPT_DIR/docker-build.sh
diff --git a/integration-tests-ex/it-image/docker-build.sh 
b/integration-tests-ex/it-image/docker-build.sh
new file mode 100755
index 0000000000..6a945aa612
--- /dev/null
+++ b/integration-tests-ex/it-image/docker-build.sh
@@ -0,0 +1,56 @@
+#! /usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#--------------------------------------------------------------------
+
+# Invokes Docker to build the image. The environment should have been
+# setup from Maven via build-image.sh or manually via quick-build.sh.
+
+# Print environment for debugging
+#env
+
+# Enable for tracing
+#set -x
+
+SCRIPT_DIR=$(cd $(dirname $0) && pwd)
+
+# Maven should have created the docker dir with the needed
+# dependency jars. If doing this by hand, run Maven once to
+# populate these jars.
+if [ ! -d $TARGET_DIR/docker ]; then
+       echo "$TARGET_DIR/docker does not exist. It should contain dependency 
jars" 1>&2
+       exit 1
+fi
+
+# Create the run-specific docker directory
+mkdir -p $TARGET_DIR/docker
+cp -r docker/* $TARGET_DIR/docker
+cd $TARGET_DIR/docker
+
+# Grab the distribution if needed (skipped if no change.)
+DISTRIB_FILE=apache-druid-$DRUID_VERSION-bin.tar.gz
+SOURCE_FILE=$PARENT_DIR/distribution/target/$DISTRIB_FILE
+if [[ ! -f $DISTRIB_FILE || $SOURCE_FILE -nt $DISTRIB_FILE ]]; then
+       cp $SOURCE_FILE .
+fi
+
+docker build -t $DRUID_IT_IMAGE_NAME \
+       --build-arg DRUID_VERSION=$DRUID_VERSION \
+       --build-arg MYSQL_VERSION=$MYSQL_VERSION \
+       --build-arg MARIADB_VERSION=$MARIADB_VERSION \
+       --build-arg CONFLUENT_VERSION=$CONFLUENT_VERSION \
+       --build-arg HADOOP_VERSION=$HADOOP_VERSION \
+       --build-arg MYSQL_DRIVER_CLASSNAME=$MYSQL_DRIVER_CLASSNAME \
+       .
diff --git a/integration-tests-ex/it-image/docker/Dockerfile 
b/integration-tests-ex/it-image/docker/Dockerfile
new file mode 100644
index 0000000000..e5ee41ff0c
--- /dev/null
+++ b/integration-tests-ex/it-image/docker/Dockerfile
@@ -0,0 +1,73 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied.  See the License for the
+# specific language governing permissions and limitations
+# under the License.
+#-------------------------------------------------------------------------
+
+# Builds the Druid image for testing. Does not include dependent
+# tools such as MySQL, Zookeeper or Kafka: those reside in their own images.
+#
+# The script assumes that Maven has placed a Druid distribution and the
+# required extra jars in the same location as this file: in the target/docker
+# directory. You must run Maven once to populate these files after each
+# build. After that first image build, you can use rebuild.sh to do follow-on
+# image builds.
+
+# This Dockerfile prefers to use the COPY command over ADD.
+# See: https://phoenixnap.com/kb/docker-add-vs-copy
+
+ARG JDK_VERSION=8-slim-buster
+
+# The FROM image provides Java on top of Debian, and
+# thus provides bash, apt-get, etc.
+# See https://hub.docker.com/_/openjdk
+
+FROM openjdk:$JDK_VERSION
+
+ARG DRUID_VERSION
+ENV DRUID_VERSION=$DRUID_VERSION
+ARG CONFLUENT_VERSION
+ENV CONFLUENT_VERSION=$CONFLUENT_VERSION
+ARG MYSQL_VERSION
+ENV MYSQL_VERSION=$MYSQL_VERSION
+ARG MARIADB_VERSION
+ENV MARIADB_VERSION=$MARIADB_VERSION
+ARG MYSQL_DRIVER_CLASSNAME=com.mysql.jdbc.Driver
+ENV MYSQL_DRIVER_CLASSNAME=$MYSQL_DRIVER_CLASSNAME
+
+ENV DRUID_HOME=/usr/local/druid
+
+# Populate build artifacts
+
+COPY apache-druid-${DRUID_VERSION}-bin.tar.gz /usr/local/
+COPY it-tools-${DRUID_VERSION}.jar /tmp/druid/extensions/it-tools/
+COPY kafka-protobuf-provider-${CONFLUENT_VERSION}.jar /tmp/druid/lib/
+COPY mysql-connector-java-${MYSQL_VERSION}.jar /tmp/druid/lib/
+COPY mariadb-java-client-${MARIADB_VERSION}.jar /tmp/druid/lib/
+COPY test-setup.sh /
+COPY druid.sh /
+COPY launch.sh /
+
+# Do the setup tasks. The tasks are done within a script, rather than
+# here, so they are easier to describe and debug. Turn on the "-x" flag
+# within the script to trace the steps if needed for debugging.
+
+RUN bash /test-setup.sh
+
+# Start in the Druid directory.
+
+USER druid:druid
+WORKDIR /
+ENTRYPOINT [ "bash", "/launch.sh" ]
diff --git a/integration-tests-ex/it-image/docker/druid.sh 
b/integration-tests-ex/it-image/docker/druid.sh
new file mode 100644
index 0000000000..22b86a3316
--- /dev/null
+++ b/integration-tests-ex/it-image/docker/druid.sh
@@ -0,0 +1,93 @@
+#!/bin/bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#-------------------------------------------------------------------------
+
+# Launches Druid within the container. Based on the script in the original
+# ITs, which in turn is based on the distribution/docker/druid.sh script.
+#
+# The key bit of functionality is to translate config passed in as
+# environment variables (from the Docker compose file) to a runtime.properties
+# file which Druid will load. When run in Docker, there is just one server
+# per container, so we put the runtime.properties file in the same location,
+# /tmp/druid/conf, in each container, and we omit the common.runtime.properties
+# file.
+
+# Fail fast on any error
+set -e
+
+getConfPath()
+{
+    cluster_conf_base=/tmp/conf/druid
+    case "$1" in
+    _common) echo $cluster_conf_base/_common ;;
+    historical) echo $cluster_conf_base/data/historical ;;
+    historical-for-query-error-test) echo $cluster_conf_base/data/historical ;;
+    middleManager) echo $cluster_conf_base/data/middleManager ;;
+    indexer) echo $cluster_conf_base/data/indexer ;;
+    coordinator) echo $cluster_conf_base/master/coordinator ;;
+    broker) echo $cluster_conf_base/query/broker ;;
+    router) echo $cluster_conf_base/query/router ;;
+    overlord) echo $cluster_conf_base/master/overlord ;;
+    *) echo $cluster_conf_base/misc/$1 ;;
+    esac
+}
+
+# Delete the old key (if existing) and append new key=value
+setKey()
+{
+    service="$1"
+    key="$2"
+    value="$3"
+    service_conf=$(getConfPath $service)/runtime.properties
+    # Delete from all
+    sed -ri "/$key=/d" $COMMON_CONF_DIR/common.runtime.properties
+    [ -f $service_conf ] && sed -ri "/$key=/d" $service_conf
+    [ -f $service_conf ] && echo "$key=$value" >>$service_conf
+    [ -f $service_conf ] || echo "$key=$value" 
>>$COMMON_CONF_DIR/common.runtime.properties
+
+    #echo "Setting $key=$value in $service_conf"
+}
+
+setupConfig()
+{
+  echo "$(date -Is) configuring service $DRUID_SERVICE"
+
+  # We put all the config in /tmp/conf to allow for a
+  # read-only root filesystem
+  mkdir -p /tmp/conf/druid
+
+  COMMON_CONF_DIR=$(getConfPath _common)
+  SERVICE_CONF_DIR=$(getConfPath ${DRUID_SERVICE})
+
+  mkdir -p $COMMON_CONF_DIR
+  mkdir -p $SERVICE_CONF_DIR
+  touch $COMMON_CONF_DIR/common.runtime.properties
+  touch $SERVICE_CONF_DIR/runtime.properties
+
+  setKey $DRUID_SERVICE druid.host $(hostname -i)
+  setKey $DRUID_SERVICE druid.worker.ip $(hostname -i)
+
+  # Write out all the environment variables starting with druid_ to druid 
service config file
+  # This will replace _ with . in the key
+  env | grep ^druid_ | while read evar;
+  do
+      # Can't use IFS='=' to parse since var might have = in it (e.g. password)
+      val=$(echo "$evar" | sed -e 's?[^=]*=??')
+      var=$(echo "$evar" | sed -e 's?^\([^=]*\)=.*?\1?g' -e 's?_?.?g')
+      setKey $DRUID_SERVICE "$var" "$val"
+  done
+}
diff --git a/integration-tests-ex/it-image/docker/launch.sh 
b/integration-tests-ex/it-image/docker/launch.sh
new file mode 100644
index 0000000000..4b8d0293c8
--- /dev/null
+++ b/integration-tests-ex/it-image/docker/launch.sh
@@ -0,0 +1,104 @@
+#! /usr/bin/env bash
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#-------------------------------------------------------------------------
+
+# Launch script which runs inside the container to set up configuration
+# and then launch Druid itself.
+
+# Fail fast on any error
+set -e
+
+# Enable for tracing
+#set -x
+
+# Dump the environment for debugging.
+#env
+
+# Launch Druid within the container.
+cd /
+
+# TODO: enable only for security-related tests?
+#/tls/generate-server-certs-and-keystores.sh
+. /druid.sh
+
+# The image contains both the MySQL and MariaDB JDBC drivers.
+# The MySQL driver is selected by the Docker Compose file.
+# Set  druid.metadata.mysql.driver.driverClassName to the preferred
+# driver.
+
+# Create druid service config files with all the config variables
+setupConfig
+
+# Export the service config file path to use in supervisord conf file
+DRUID_SERVICE_CONF_DIR="$(. /druid.sh; getConfPath ${DRUID_SERVICE})"
+
+# Export the common config file path to use in supervisord conf file
+DRUID_COMMON_CONF_DIR="$(. /druid.sh; getConfPath _common)"
+
+SHARED_DIR=/shared
+LOG_DIR=$SHARED_DIR/logs
+DRUID_HOME=/usr/local/druid
+
+# For multiple nodes of the same type to create a unique name
+INSTANCE_NAME=$DRUID_SERVICE
+if [ -n "$DRUID_INSTANCE" ]; then
+       INSTANCE_NAME=${DRUID_SERVICE}-$DRUID_INSTANCE
+fi
+
+# Assemble Java options
+JAVA_OPTS="$DRUID_SERVICE_JAVA_OPTS $DRUID_COMMON_JAVA_OPTS 
-XX:HeapDumpPath=$LOG_DIR/$INSTANCE_NAME $DEBUG_OPTS"
+LOG4J_CONFIG=$SHARED_DIR/conf/log4j2.xml
+if [ -f $LOG4J_CONFIG ]; then
+       JAVA_OPTS="$JAVA_OPTS -Dlog4j.configurationFile=$LOG4J_CONFIG"
+fi
+
+# The env-to-config scripts creates a single config file.
+# The common one is empty, but Druid still wants to find it,
+# so we add it to the class path anyway.
+CP=$DRUID_COMMON_CONF_DIR:$DRUID_SERVICE_CONF_DIR:${DRUID_HOME}/lib/\*
+if [ -n "$DRUID_CLASSPATH" ]; then
+       CP=$CP:$DRUID_CLASSPATH
+fi
+HADOOP_XML=$SHARED_DIR/hadoop-xml
+if [ -d $HADOOP_XML ]; then
+       CP=$HADOOP_XML:$CP
+fi
+
+# For jar files
+EXTRA_LIBS=$SHARED_DIR/lib
+if [ -d $EXTRA_LIBS ]; then
+       CP=$CP:${EXTRA_LIBS}/\*
+fi
+
+# For resources on the class path
+EXTRA_RESOURCES=$SHARED_DIR/resources
+if [ -d $EXTRA_RESOURCES ]; then
+       CP=$CP:$EXTRA_RESOURCES
+fi
+
+LOG_FILE=$LOG_DIR/${INSTANCE_NAME}.log
+echo "" >> $LOG_FILE
+echo "--- Service runtime.properties ---" >> $LOG_FILE
+cat $DRUID_SERVICE_CONF_DIR/*.properties >> $LOG_FILE
+echo "---" >> $LOG_FILE
+echo "" >> $LOG_FILE
+
+# Run Druid service
+cd $DRUID_HOME
+exec java $JAVA_OPTS -cp $CP \
+       org.apache.druid.cli.Main server $DRUID_SERVICE \
+       >> $LOG_FILE 2>&1
diff --git a/integration-tests-ex/it-image/docker/test-setup.sh 
b/integration-tests-ex/it-image/docker/test-setup.sh
new file mode 100644
index 0000000000..ba4a808a2e
--- /dev/null
+++ b/integration-tests-ex/it-image/docker/test-setup.sh
@@ -0,0 +1,79 @@
+#! /usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#-------------------------------------------------------------------------
+
+# This script runs inside the container to prepare the Druid test image.
+
+# Fail fast on any error
+set -e
+
+# Fail on unset environment variables
+set -u
+
+# Enable for tracing
+#set -x
+
+# For debugging: verify environment
+#env
+
+# Druid system user
+adduser --system --group --no-create-home druid
+
+# Adjust ownership of the Druid launch script.
+cd /
+chmod +x launch.sh
+chown druid:druid launch.sh druid.sh
+
+# Convenience script to run Druid for tools.
+# Expands the env vars into the script for stability.
+# Maybe not needed now?
+cat > /run-druid.sh << EOF
+#! /bin/bash
+
+java -cp "${DRUID_HOME}/lib/*" \\
+       -Ddruid.extensions.directory=${DRUID_HOME}/extensions \\
+       -Ddruid.extensions.loadList='["mysql-metadata-storage"]' \\
+       -Ddruid.metadata.storage.type=mysql \\
+       -Ddruid.metadata.mysql.driver.driverClassName=$MYSQL_DRIVER_CLASSNAME \\
+       \$*
+EOF
+chmod a+x /run-druid.sh
+
+# Install Druid, owned by user:group druid:druid
+# The original Druid directory contains only
+# libraries. No extensions should be present: those
+# should be added in this step.
+cd /usr/local/
+
+tar -xzf apache-druid-${DRUID_VERSION}-bin.tar.gz
+rm apache-druid-${DRUID_VERSION}-bin.tar.gz
+
+ls -l /tmp/druid
+
+# Add extra libraries and extensions.
+mv /tmp/druid/lib/* apache-druid-${DRUID_VERSION}/lib
+mv /tmp/druid/extensions/* apache-druid-${DRUID_VERSION}/extensions
+
+# The whole shebang is owned by druid.
+chown -R druid:druid apache-druid-${DRUID_VERSION}
+
+# Leave the versioned directory, create a symlink to $DRUID_HOME.
+ln -s apache-druid-${DRUID_VERSION} $DRUID_HOME
+
+# Clean up time
+# Should be nothing to clean...
+rm -rf /tmp/*
+rm -rf /var/tmp/*
diff --git a/integration-tests-ex/it-image/pom.xml 
b/integration-tests-ex/it-image/pom.xml
new file mode 100644
index 0000000000..cd0d4e4e27
--- /dev/null
+++ b/integration-tests-ex/it-image/pom.xml
@@ -0,0 +1,231 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+
+<!--
+Builds the Docker image used for integration testing.
+
+* Removes the image in the clean phase.
+* Uses the Druid build from the distribution project.
+* Assembles the set of needed files in the compile phase.
+* Builds the Docker image in the install phase.
+
+If you have already built Druid up to the distribution install phase,
+then this project can be run over and over if you need to debug
+changes to the image.
+
+Reference: https://dzone.com/articles/build-docker-image-from-maven
+-->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <packaging>pom</packaging>
+
+    <artifactId>it-image</artifactId>
+    <name>it-image</name>
+    <description>Build the Docker image for integration tests.</description>
+
+    <parent>
+        <artifactId>druid</artifactId>
+        <groupId>org.apache.druid</groupId>
+        <version>0.24.0-SNAPSHOT</version>
+        <relativePath>../../pom.xml</relativePath>
+    </parent>
+
+    <properties>
+        <!-- Default image name when run locally. When run as part of a build, 
the
+             image name will typically be provided by the build environment, 
and will
+             override this default name. -->
+        
<druid.it.image-name>${project.groupId}/test:${project.version}</druid.it.image-name>
+        <confluent-version>5.5.1</confluent-version>
+        <mariadb.version>2.7.3</mariadb.version>
+        <mysql.image.version>5.7-debian</mysql.image.version>
+        
<hadoop.integ.libs>"org.apache.hadoop:hadoop-client:${hadoop.compile.version}", 
"org.apache.hadoop:hadoop-azure:${hadoop.compile.version}"</hadoop.integ.libs>
+        
<hadoop.s3.impl>org.apache.hadoop.fs.s3native.NativeS3FileSystem</hadoop.s3.impl>
+    </properties>
+
+    <repositories>
+        <!-- For the Kafka protobuf provider. -->
+        <repository>
+            <id>confluent</id>
+            <name>Confluent</name>
+            <url>https://packages.confluent.io/maven/</url>
+        </repository>
+    </repositories>
+
+    <profiles>
+        <profile>
+            <id>test-image</id>
+            <activation>
+                <activeByDefault>false</activeByDefault>
+            </activation>
+
+            <!-- Dependencies are inside the profile so they are ignored if 
not building
+                 the image. This is required for the "animal-sniffer" stage in 
the build
+                 which builds everything except distribution, so our 
distribution dependency
+                 fails if not scoped within the profile. -->
+            <dependencies>
+                <!-- Dependencies placed in the image. -->
+                <dependency>
+                    <groupId>org.apache.druid</groupId>
+                    <artifactId>distribution</artifactId>
+                    <version>${project.parent.version}</version>
+                    <type>pom</type>
+                </dependency>
+                <dependency>
+                    <groupId>org.apache.druid</groupId>
+                    <artifactId>it-tools</artifactId>
+                    <version>${project.parent.version}</version>
+                </dependency>
+                <dependency>
+                    <groupId>io.confluent</groupId>
+                    <artifactId>kafka-protobuf-provider</artifactId>
+                    <version>${confluent-version}</version>
+                </dependency>
+                <!-- Tests can choose either the MySQL or MariaDB driver. -->
+                <dependency>
+                    <groupId>mysql</groupId>
+                    <artifactId>mysql-connector-java</artifactId>
+                    <version>${mysql.version}</version>
+                </dependency>
+                <dependency>
+                    <groupId>org.mariadb.jdbc</groupId>
+                    <artifactId>mariadb-java-client</artifactId>
+                    <version>${mariadb.version}</version>
+                </dependency>
+            </dependencies>
+
+            <build>
+                <plugins>
+                    <!-- Populate the target/docker directory with the MySQL
+                       and Kafka clients. -->
+                    <plugin>
+                        <groupId>org.apache.maven.plugins</groupId>
+                        <artifactId>maven-dependency-plugin</artifactId>
+                        <version>3.3.0</version>
+                        <executions>
+                            <execution>
+                                <id>copy</id>
+                                <phase>prepare-package</phase>
+                                <goals>
+                                    <goal>copy</goal>
+                                </goals>
+                                <configuration>
+                                    <artifactItems>
+                                        <artifactItem>
+                                            <groupId>mysql</groupId>
+                                            
<artifactId>mysql-connector-java</artifactId>
+                                            <version>${mysql.version}</version>
+                                            <type>jar</type>
+                                            <overWrite>true</overWrite>
+                                            
<outputDirectory>${project.build.directory}/docker</outputDirectory>
+                                        </artifactItem>
+                                        <artifactItem>
+                                            <groupId>org.mariadb.jdbc</groupId>
+                                            
<artifactId>mariadb-java-client</artifactId>
+                                            
<version>${mariadb.version}</version>
+                                            <type>jar</type>
+                                            <overWrite>true</overWrite>
+                                            
<outputDirectory>${project.build.directory}/docker</outputDirectory>
+                                        </artifactItem>
+                                        <artifactItem>
+                                            <groupId>io.confluent</groupId>
+                                            
<artifactId>kafka-protobuf-provider</artifactId>
+                                            
<version>${confluent-version}</version>
+                                            <type>jar</type>
+                                            <overWrite>true</overWrite>
+                                            
<outputDirectory>${project.build.directory}/docker</outputDirectory>
+                                        </artifactItem>
+                                        <artifactItem>
+                                            <groupId>org.apache.druid</groupId>
+                                            <artifactId>it-tools</artifactId>
+                                            
<version>${project.version}</version>
+                                            <type>jar</type>
+                                            <overWrite>true</overWrite>
+                                            
<outputDirectory>${project.build.directory}/docker</outputDirectory>
+                                        </artifactItem>
+                                    </artifactItems>
+                                </configuration>
+                            </execution>
+                        </executions>
+                    </plugin>
+                    <plugin>
+                        <groupId>org.codehaus.mojo</groupId>
+                        <artifactId>exec-maven-plugin</artifactId>
+                        <executions>
+
+                            <!-- Remove existing image from local repo -->
+                            <execution>
+                                <id>docker-clean</id>
+                                <phase>clean</phase>
+                                <goals>
+                                    <goal>exec</goal>
+                                </goals>
+                                <configuration>
+                                    <executable>docker</executable>
+                                    
<workingDirectory>${project.basedir}</workingDirectory>
+                                    <arguments>
+                                        <argument>rmi</argument>
+                                        <argument>-f</argument> <!-- Prevent 
error if no image. -->
+                                        
<argument>${druid.it.image-name}</argument>
+                                    </arguments>
+                                </configuration>
+                            </execution>
+
+                            <!-- Create the docker image via a script which 
will also grab
+                                 the distribution tarball. -->
+                            <execution>
+                                <id>build-image</id>
+                                <phase>install</phase>
+                                <goals>
+                                    <goal>exec</goal>
+                                </goals>
+                                <configuration>
+                                    <environmentVariables>
+                                        
<DRUID_VERSION>${project.version}</DRUID_VERSION>
+                                        
<MYSQL_VERSION>${mysql.version}</MYSQL_VERSION>
+                                        
<MYSQL_IMAGE_VERSION>${mysql.image.version}</MYSQL_IMAGE_VERSION>
+                                        
<MARIADB_VERSION>${mariadb.version}</MARIADB_VERSION>
+                                        
<KAFKA_VERSION>${apache.kafka.version}</KAFKA_VERSION>
+                                        
<CONFLUENT_VERSION>${confluent-version}</CONFLUENT_VERSION>
+                                        
<ZK_VERSION>${zookeeper.version}</ZK_VERSION>
+                                        
<HADOOP_VERSION>${hadoop.compile.version}</HADOOP_VERSION>
+                                        
<MYSQL_DRIVER_CLASSNAME>com.mysql.jdbc.Driver</MYSQL_DRIVER_CLASSNAME>
+                                        
<DRUID_IT_IMAGE_NAME>${druid.it.image-name}</DRUID_IT_IMAGE_NAME>
+                                        
<TARGET_DIR>${project.build.directory}</TARGET_DIR>
+                                        <!-- Maven has no good way to get the 
root directory, so
+                                             this is as close as we can get. 
-->
+                                        
<PARENT_DIR>${project.basedir}/../..</PARENT_DIR>
+                                    </environmentVariables>
+                                    
<workingDirectory>${project.basedir}</workingDirectory>
+                                    <executable>bash</executable>
+                                    <arguments>
+                                        <argument>build-image.sh</argument>
+                                    </arguments>
+                                </configuration>
+                            </execution>
+
+                        </executions>
+                    </plugin>
+                </plugins>
+            </build>
+        </profile>
+    </profiles>
+</project>
diff --git a/integration-tests-ex/it-image/rebuild.sh 
b/integration-tests-ex/it-image/rebuild.sh
new file mode 100755
index 0000000000..f8ba472049
--- /dev/null
+++ b/integration-tests-ex/it-image/rebuild.sh
@@ -0,0 +1,48 @@
+#! /bin/bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#--------------------------------------------------------------------
+
+# Rebuilds the docker image outside of Maven for debugging. Use Maven
+# to build the image the first time within a branch. Maven sets
+# the required environment variables, then calls build-image.sh which
+# creates the target/env.sh file which captures the environment
+# variables. env.sh is used to launch tests, but it also allows you to
+# take a faster shortcut if you need to rebuild the image, such as when
+# debugging changes to the image. This script reuses those environment
+# variables and then invokes the Docker build script.
+#
+# You only need to run from Maven if you switch branches or otherwise
+# change the software versions recorded in env.sh.
+
+SCRIPT_DIR=$(cd $(dirname $0) && pwd)
+
+# Target directory. Maven ${project.build.directory}
+# Example is for the usual setup.
+export TARGET_DIR=$SCRIPT_DIR/target
+
+if [ ! -f $TARGET_DIR/env.sh ]; then
+       echo "Please run mvn -P test-image install once before rebuilding" 1>&2
+       exit 1
+fi
+
+source $TARGET_DIR/env.sh
+
+# Directory of the parent Druid pom.xml file.
+# Unbelievably hard to get from Maven itself.
+export PARENT_DIR=$SCRIPT_DIR/../..
+
+exec bash $SCRIPT_DIR/docker-build.sh
diff --git a/integration-tests-ex/it-image/scripts/build-shared.sh 
b/integration-tests-ex/it-image/scripts/build-shared.sh
new file mode 100644
index 0000000000..cea4005482
--- /dev/null
+++ b/integration-tests-ex/it-image/scripts/build-shared.sh
@@ -0,0 +1,40 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This script assumes that the working directory is
+# $DRUID_DEV/druid-integration-tests
+
+SHARED_DIR=target/shared
+mkdir -p $SHARED_DIR/hadoop_xml
+mkdir -p $SHARED_DIR/hadoop-dependencies
+mkdir -p $SHARED_DIR/logs
+mkdir -p $SHARED_DIR/tasklogs
+mkdir -p $SHARED_DIR/docker/extensions
+mkdir -p $SHARED_DIR/docker/credentials
+
+DRUID_DEV=../..
+
+# Setup client keystore
+./docker/tls/generate-client-certs-and-keystores.sh
+
+# One of the integration tests needs the wikiticker sample data
+
+DATA_DIR=$SHARED_DIR/wikiticker-it
+mkdir -p $DATA_DIR
+cp 
$DRUID_DEV/examples/quickstart/tutorial/wikiticker-2015-09-12-sampled.json.gz 
$DATA_DIR/wikiticker-2015-09-12-sampled.json.gz
+cp test-data/wiki-simple-lookup.json $DATA_DIR/wiki-simple-lookup.json
+cp test-data/wikipedia.desc $DATA_DIR/wikipedia.desc
diff --git a/integration-tests-ex/it-tools/README.md 
b/integration-tests-ex/it-tools/README.md
new file mode 100644
index 0000000000..cb8f435358
--- /dev/null
+++ b/integration-tests-ex/it-tools/README.md
@@ -0,0 +1,48 @@
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+
+# Testing Tools
+
+`it-tools` is a copy of `extensions-core/testing-tools` (module
+name `druid-testing-tools`.)
+
+The testing tools are added to the Druid test Docker image. The
+`druid-testing-tools` module defines most such additions. However,
+`integration-tests` defines a custom node role which also must be
+added to the image. `integration-tests` uses a different mechanism
+to do that addition.
+
+Here, we want a single extension for all the testing gizmos.
+This is a direct copy of the `druid-testing-tools`
+extension, along with a copy of the custom node role from
+`integration-tests`.
+
+The reason this is a copy, rather than fixing up `druid-testing-tools`
+is that the existing `integration-tests` must continue to run and it
+is very difficult to change or test them. (Which is the reason for
+this parallel approach.) To keep backward compatibility, and to avoid
+changing `integration-tests`, we keep the prior approach and make
+copies here for the new approach.
+
+The names should never clash: `it-tools` is only ever used
+within the `docker-test` project, and the `druid-testing-tools` is
+*not* included as a dependency.
+
+Over time, once `integration-tests` are converted, then the
+`druid-testing-tools` module can be deprecated in favor of this one.
diff --git a/integration-tests-ex/it-tools/pom.xml 
b/integration-tests-ex/it-tools/pom.xml
new file mode 100644
index 0000000000..e85620b22c
--- /dev/null
+++ b/integration-tests-ex/it-tools/pom.xml
@@ -0,0 +1,155 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one
+  ~ or more contributor license agreements.  See the NOTICE file
+  ~ distributed with this work for additional information
+  ~ regarding copyright ownership.  The ASF licenses this file
+  ~ to you under the Apache License, Version 2.0 (the
+  ~ "License"); you may not use this file except in compliance
+  ~ with the License.  You may obtain a copy of the License at
+  ~
+  ~   http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing,
+  ~ software distributed under the License is distributed on an
+  ~ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+  ~ KIND, either express or implied.  See the License for the
+  ~ specific language governing permissions and limitations
+  ~ under the License.
+  -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 
http://maven.apache.org/maven-v4_0_0.xsd">
+       <modelVersion>4.0.0</modelVersion>
+
+       <artifactId>it-tools</artifactId>
+       <name>it-tools</name>
+       <description>IT - Server-side tools</description>
+
+       <parent>
+               <groupId>org.apache.druid</groupId>
+        <artifactId>druid</artifactId>
+               <version>0.24.0-SNAPSHOT</version>
+               <relativePath>../../pom.xml</relativePath>
+       </parent>
+
+       <dependencies>
+               <dependency>
+                       <groupId>org.apache.druid</groupId>
+                       <artifactId>druid-processing</artifactId>
+                       <version>${project.parent.version}</version>
+                       <scope>provided</scope>
+               </dependency>
+               <dependency>
+                       <groupId>org.apache.druid</groupId>
+                       <artifactId>druid-services</artifactId>
+                       <version>${project.parent.version}</version>
+                       <scope>provided</scope>
+               </dependency>
+               <dependency>
+                       <groupId>org.apache.druid</groupId>
+                       <artifactId>druid-sql</artifactId>
+                       <version>${project.parent.version}</version>
+                       <scope>provided</scope>
+               </dependency>
+               <dependency>
+                       <groupId>org.apache.druid</groupId>
+                       <artifactId>druid-core</artifactId>
+                       <version>${project.parent.version}</version>
+                       <scope>provided</scope>
+               </dependency>
+               <dependency>
+                       <groupId>org.apache.druid</groupId>
+                       <artifactId>druid-server</artifactId>
+                       <version>${project.parent.version}</version>
+                       <scope>provided</scope>
+               </dependency>
+
+               <!-- Test Dependencies -->
+               <dependency>
+                       <groupId>org.apache.druid</groupId>
+                       <artifactId>druid-core</artifactId>
+                       <version>${project.parent.version}</version>
+                       <type>test-jar</type>
+                       <scope>test</scope>
+               </dependency>
+               <dependency>
+                       <groupId>org.apache.druid</groupId>
+                       <artifactId>druid-processing</artifactId>
+                       <version>${project.parent.version}</version>
+                       <type>test-jar</type>
+                       <scope>test</scope>
+               </dependency>
+               <dependency>
+                       <groupId>org.apache.druid</groupId>
+                       <artifactId>druid-server</artifactId>
+                       <version>${project.parent.version}</version>
+                       <scope>test</scope>
+                       <type>test-jar</type>
+               </dependency>
+               <dependency>
+                       <groupId>org.apache.druid</groupId>
+                       <artifactId>druid-sql</artifactId>
+                       <version>${project.parent.version}</version>
+                       <type>test-jar</type>
+                       <scope>test</scope>
+               </dependency>
+               <dependency>
+                       <groupId>com.google.code.findbugs</groupId>
+                       <artifactId>jsr305</artifactId>
+                       <scope>provided</scope>
+               </dependency>
+               <dependency>
+                       <groupId>com.github.rvesse</groupId>
+                       <artifactId>airline</artifactId>
+                       <scope>provided</scope>
+               </dependency>
+               <dependency>
+                       <groupId>com.google.inject</groupId>
+                       <artifactId>guice</artifactId>
+                       <scope>provided</scope>
+               </dependency>
+               <dependency>
+                       <groupId>org.apache.commons</groupId>
+                       <artifactId>commons-lang3</artifactId>
+                       <scope>provided</scope>
+               </dependency>
+               <dependency>
+                       <groupId>org.eclipse.jetty</groupId>
+                       <artifactId>jetty-servlet</artifactId>
+                       <scope>provided</scope>
+               </dependency>
+               <dependency>
+                       <groupId>joda-time</groupId>
+                       <artifactId>joda-time</artifactId>
+                       <scope>provided</scope>
+               </dependency>
+               <dependency>
+                       <groupId>com.fasterxml.jackson.core</groupId>
+                       <artifactId>jackson-databind</artifactId>
+                       <scope>provided</scope>
+               </dependency>
+               <dependency>
+                       <groupId>com.google.guava</groupId>
+                       <artifactId>guava</artifactId>
+                       <scope>provided</scope>
+               </dependency>
+               <dependency>
+                       <groupId>com.google.inject.extensions</groupId>
+                       <artifactId>guice-servlet</artifactId>
+                       <scope>provided</scope>
+               </dependency>
+               <dependency>
+                       <groupId>org.eclipse.jetty</groupId>
+                       <artifactId>jetty-server</artifactId>
+                       <scope>provided</scope>
+               </dependency>
+               <dependency>
+                       <groupId>org.apache.calcite</groupId>
+                       <artifactId>calcite-core</artifactId>
+                       <scope>provided</scope>
+               </dependency>
+  </dependencies>
+
+</project>
diff --git 
a/integration-tests-ex/it-tools/src/main/java/org/apache/druid/testing/tools/CliCustomNodeRole.java
 
b/integration-tests-ex/it-tools/src/main/java/org/apache/druid/testing/tools/CliCustomNodeRole.java
new file mode 100644
index 0000000000..97e5e70e22
--- /dev/null
+++ 
b/integration-tests-ex/it-tools/src/main/java/org/apache/druid/testing/tools/CliCustomNodeRole.java
@@ -0,0 +1,176 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.testing.tools;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.github.rvesse.airline.annotations.Command;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableSet;
+import com.google.inject.Inject;
+import com.google.inject.Injector;
+import com.google.inject.Key;
+import com.google.inject.Module;
+import com.google.inject.name.Names;
+import com.google.inject.servlet.GuiceFilter;
+import org.apache.druid.cli.ServerRunnable;
+import org.apache.druid.client.coordinator.CoordinatorClient;
+import org.apache.druid.discovery.NodeRole;
+import org.apache.druid.guice.Jerseys;
+import org.apache.druid.guice.LazySingleton;
+import org.apache.druid.guice.LifecycleModule;
+import org.apache.druid.guice.annotations.Json;
+import org.apache.druid.java.util.common.logger.Logger;
+import org.apache.druid.server.http.SelfDiscoveryResource;
+import org.apache.druid.server.initialization.ServerConfig;
+import org.apache.druid.server.initialization.jetty.JettyServerInitUtils;
+import org.apache.druid.server.initialization.jetty.JettyServerInitializer;
+import org.apache.druid.server.security.AuthenticationUtils;
+import org.apache.druid.server.security.Authenticator;
+import org.apache.druid.server.security.AuthenticatorMapper;
+import org.eclipse.jetty.server.Handler;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.server.handler.HandlerList;
+import org.eclipse.jetty.server.handler.StatisticsHandler;
+import org.eclipse.jetty.servlet.DefaultServlet;
+import org.eclipse.jetty.servlet.ServletContextHandler;
+import org.eclipse.jetty.servlet.ServletHolder;
+
+import java.util.List;
+import java.util.Properties;
+import java.util.Set;
+
+@Command(
+    name = CliCustomNodeRole.SERVICE_NAME,
+    description = "Some custom druid node role defined in an extension"
+)
+public class CliCustomNodeRole extends ServerRunnable
+{
+  private static final Logger LOG = new Logger(CliCustomNodeRole.class);
+
+  public static final String SERVICE_NAME = "custom-node-role";
+  public static final int PORT = 9301;
+  public static final int TLS_PORT = 9501;
+  public static final NodeRole NODE_ROLE = new 
NodeRole(CliCustomNodeRole.SERVICE_NAME);
+
+  public CliCustomNodeRole()
+  {
+    super(LOG);
+  }
+
+  @Override
+  protected Set<NodeRole> getNodeRoles(Properties properties)
+  {
+    return ImmutableSet.of(NODE_ROLE);
+  }
+
+  @Override
+  protected List<? extends Module> getModules()
+  {
+    return ImmutableList.of(
+        binder -> {
+          LOG.info("starting up custom node role");
+          
binder.bindConstant().annotatedWith(Names.named("serviceName")).to(CliCustomNodeRole.SERVICE_NAME);
+          
binder.bindConstant().annotatedWith(Names.named("servicePort")).to(CliCustomNodeRole.PORT);
+          
binder.bindConstant().annotatedWith(Names.named("tlsServicePort")).to(CliCustomNodeRole.TLS_PORT);
+
+          binder.bind(CoordinatorClient.class).in(LazySingleton.class);
+
+          
binder.bind(JettyServerInitializer.class).to(CustomJettyServiceInitializer.class).in(LazySingleton.class);
+          LifecycleModule.register(binder, Server.class);
+
+          bindAnnouncer(
+              binder,
+              DiscoverySideEffectsProvider.create()
+          );
+          Jerseys.addResource(binder, SelfDiscoveryResource.class);
+          LifecycleModule.registerKey(binder, 
Key.get(SelfDiscoveryResource.class));
+      }
+    );
+  }
+
+  // ugly mimic of other Jetty initializers
+  private static class CustomJettyServiceInitializer implements 
JettyServerInitializer
+  {
+    private static List<String> UNSECURED_PATHS = ImmutableList.of(
+        "/status/health"
+    );
+
+    private final ServerConfig serverConfig;
+
+    @Inject
+    public CustomJettyServiceInitializer(ServerConfig serverConfig)
+    {
+      this.serverConfig = serverConfig;
+    }
+
+    @Override
+    public void initialize(Server server, Injector injector)
+    {
+      final ServletContextHandler root = new 
ServletContextHandler(ServletContextHandler.SESSIONS);
+      root.addServlet(new ServletHolder(new DefaultServlet()), "/*");
+
+      final ObjectMapper jsonMapper = 
injector.getInstance(Key.get(ObjectMapper.class, Json.class));
+      final AuthenticatorMapper authenticatorMapper = 
injector.getInstance(AuthenticatorMapper.class);
+
+      AuthenticationUtils.addSecuritySanityCheckFilter(root, jsonMapper);
+
+      // perform no-op authorization for these resources
+      AuthenticationUtils.addNoopAuthenticationAndAuthorizationFilters(root, 
UNSECURED_PATHS);
+
+      List<Authenticator> authenticators = 
authenticatorMapper.getAuthenticatorChain();
+      AuthenticationUtils.addAuthenticationFilterChain(root, authenticators);
+
+      JettyServerInitUtils.addAllowHttpMethodsFilter(root, 
serverConfig.getAllowedHttpMethods());
+
+      JettyServerInitUtils.addExtensionFilters(root, injector);
+
+      // Check that requests were authorized before sending responses
+      AuthenticationUtils.addPreResponseAuthorizationCheckFilter(
+          root,
+          authenticators,
+          jsonMapper
+      );
+
+      root.addFilter(GuiceFilter.class, "/*", null);
+
+      final HandlerList handlerList = new HandlerList();
+      // Do not change the order of the handlers that have already been added
+      for (Handler handler : server.getHandlers()) {
+        handlerList.addHandler(handler);
+      }
+
+      handlerList.addHandler(JettyServerInitUtils.getJettyRequestLogHandler());
+
+      // Add Gzip handler at the very end
+      handlerList.addHandler(
+          JettyServerInitUtils.wrapWithDefaultGzipHandler(
+              root,
+              serverConfig.getInflateBufferSize(),
+              serverConfig.getCompressionLevel()
+          )
+      );
+
+      final StatisticsHandler statisticsHandler = new StatisticsHandler();
+      statisticsHandler.setHandler(handlerList);
+
+      server.setHandler(statisticsHandler);
+    }
+  }
+}
diff --git 
a/integration-tests-ex/it-tools/src/main/java/org/apache/druid/testing/tools/CliHistoricalForQueryErrorTest.java
 
b/integration-tests-ex/it-tools/src/main/java/org/apache/druid/testing/tools/CliHistoricalForQueryErrorTest.java
new file mode 100644
index 0000000000..89989bd5d5
--- /dev/null
+++ 
b/integration-tests-ex/it-tools/src/main/java/org/apache/druid/testing/tools/CliHistoricalForQueryErrorTest.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.testing.tools;
+
+import com.github.rvesse.airline.annotations.Command;
+import com.google.inject.Binder;
+import com.google.inject.Inject;
+import org.apache.druid.cli.CliHistorical;
+import org.apache.druid.guice.LazySingleton;
+import org.apache.druid.java.util.common.logger.Logger;
+import org.apache.druid.query.QuerySegmentWalker;
+
+import java.util.Properties;
+
+@Command(
+    name = "historical-for-query-error-test",
+    description = "Runs a Historical node modified for query error test"
+)
+public class CliHistoricalForQueryErrorTest extends CliHistorical
+{
+  private static final Logger log = new 
Logger(CliHistoricalForQueryErrorTest.class);
+
+  public CliHistoricalForQueryErrorTest()
+  {
+    super();
+  }
+
+  @Inject
+  @Override
+  public void configure(Properties properties)
+  {
+    log.info("Historical is configured for testing query error on missing 
segments");
+  }
+
+  @Override
+  public void bindQuerySegmentWalker(Binder binder)
+  {
+    
binder.bind(QuerySegmentWalker.class).to(ServerManagerForQueryErrorTest.class).in(LazySingleton.class);
+  }
+}
diff --git 
a/integration-tests-ex/it-tools/src/main/java/org/apache/druid/testing/tools/CustomNodeRoleClientModule.java
 
b/integration-tests-ex/it-tools/src/main/java/org/apache/druid/testing/tools/CustomNodeRoleClientModule.java
new file mode 100644
index 0000000000..f1171ffa4a
--- /dev/null
+++ 
b/integration-tests-ex/it-tools/src/main/java/org/apache/druid/testing/tools/CustomNodeRoleClientModule.java
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.testing.tools;
+
+import com.fasterxml.jackson.databind.Module;
+import com.google.inject.Binder;
+import org.apache.druid.initialization.DruidModule;
+
+import java.util.Collections;
+import java.util.List;
+
+/**
+ * Super-simple "client" for the custom node role which defines
+ * the node role so that REST APIs and the system tables are
+ * aware of this role.
+ */
+public class CustomNodeRoleClientModule implements DruidModule
+{
+  @Override
+  public void configure(Binder binder)
+  {
+    // Not yet. Pending PR #12222
+    // NodeRoles.addRole(binder, CliCustomNodeRole.NODE_ROLE);
+  }
+
+  @Override
+  public List<? extends Module> getJacksonModules()
+  {
+    return Collections.emptyList();
+  }
+}
diff --git 
a/integration-tests-ex/it-tools/src/main/java/org/apache/druid/testing/tools/CustomNodeRoleCommandCreator.java
 
b/integration-tests-ex/it-tools/src/main/java/org/apache/druid/testing/tools/CustomNodeRoleCommandCreator.java
new file mode 100644
index 0000000000..028f574f22
--- /dev/null
+++ 
b/integration-tests-ex/it-tools/src/main/java/org/apache/druid/testing/tools/CustomNodeRoleCommandCreator.java
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.testing.tools;
+
+import com.github.rvesse.airline.builder.CliBuilder;
+import org.apache.druid.cli.CliCommandCreator;
+
+public class CustomNodeRoleCommandCreator implements CliCommandCreator
+{
+  @SuppressWarnings({ "rawtypes", "unchecked" })
+  @Override
+  public void addCommands(CliBuilder builder)
+  {
+    builder.withGroup("server").withCommands(CliCustomNodeRole.class);
+  }
+}
diff --git 
a/integration-tests-ex/it-tools/src/main/java/org/apache/druid/testing/tools/QueryRetryTestCommandCreator.java
 
b/integration-tests-ex/it-tools/src/main/java/org/apache/druid/testing/tools/QueryRetryTestCommandCreator.java
new file mode 100644
index 0000000000..4fdd9e9d2d
--- /dev/null
+++ 
b/integration-tests-ex/it-tools/src/main/java/org/apache/druid/testing/tools/QueryRetryTestCommandCreator.java
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.testing.tools;
+
+import com.github.rvesse.airline.builder.CliBuilder;
+import org.apache.druid.cli.CliCommandCreator;
+
+public class QueryRetryTestCommandCreator implements CliCommandCreator
+{
+  @SuppressWarnings({ "rawtypes", "unchecked" })
+  @Override
+  public void addCommands(CliBuilder builder)
+  {
+    
builder.withGroup("server").withCommands(CliHistoricalForQueryErrorTest.class);
+  }
+}
diff --git 
a/integration-tests-ex/it-tools/src/main/java/org/apache/druid/testing/tools/ServerManagerForQueryErrorTest.java
 
b/integration-tests-ex/it-tools/src/main/java/org/apache/druid/testing/tools/ServerManagerForQueryErrorTest.java
new file mode 100644
index 0000000000..160cd9db3a
--- /dev/null
+++ 
b/integration-tests-ex/it-tools/src/main/java/org/apache/druid/testing/tools/ServerManagerForQueryErrorTest.java
@@ -0,0 +1,238 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.testing.tools;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.inject.Inject;
+import org.apache.commons.lang3.mutable.MutableBoolean;
+import org.apache.druid.client.cache.Cache;
+import org.apache.druid.client.cache.CacheConfig;
+import org.apache.druid.client.cache.CachePopulator;
+import org.apache.druid.guice.annotations.Smile;
+import org.apache.druid.java.util.common.guava.Accumulator;
+import org.apache.druid.java.util.common.guava.Sequence;
+import org.apache.druid.java.util.common.guava.Yielder;
+import org.apache.druid.java.util.common.guava.YieldingAccumulator;
+import org.apache.druid.java.util.common.logger.Logger;
+import org.apache.druid.java.util.emitter.service.ServiceEmitter;
+import org.apache.druid.query.Query;
+import org.apache.druid.query.QueryCapacityExceededException;
+import org.apache.druid.query.QueryProcessingPool;
+import org.apache.druid.query.QueryRunner;
+import org.apache.druid.query.QueryRunnerFactory;
+import org.apache.druid.query.QueryRunnerFactoryConglomerate;
+import org.apache.druid.query.QueryTimeoutException;
+import org.apache.druid.query.QueryToolChest;
+import org.apache.druid.query.QueryUnsupportedException;
+import org.apache.druid.query.ReportTimelineMissingSegmentQueryRunner;
+import org.apache.druid.query.ResourceLimitExceededException;
+import org.apache.druid.query.SegmentDescriptor;
+import org.apache.druid.segment.ReferenceCountingSegment;
+import org.apache.druid.segment.SegmentReference;
+import org.apache.druid.segment.join.JoinableFactoryWrapper;
+import org.apache.druid.server.SegmentManager;
+import org.apache.druid.server.coordination.ServerManager;
+import org.apache.druid.server.initialization.ServerConfig;
+import org.apache.druid.timeline.VersionedIntervalTimeline;
+
+import java.util.HashSet;
+import java.util.Optional;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.atomic.AtomicLong;
+import java.util.function.Function;
+
+/**
+ * This server manager is designed to test various query failures.
+ *
+ * - Missing segments. A segment can be missing during a query if a historical 
drops the segment
+ *   after the broker issues the query to the historical. To mimic this 
situation, the historical
+ *   with this server manager announces all segments assigned, but reports 
missing segments for the
+ *   first 3 segments specified in the query. See 
ITQueryRetryTestOnMissingSegments.
+ * - Other query errors. This server manager returns a sequence that always 
throws an exception
+ *   based on a given query context value. See ITQueryErrorTest.
+ *
+ * @see org.apache.druid.query.RetryQueryRunner for query retrying.
+ * @see org.apache.druid.client.JsonParserIterator for handling query errors 
from historicals.
+ */
+public class ServerManagerForQueryErrorTest extends ServerManager
+{
+  // Query context key that indicates this query is for query retry testing.
+  public static final String QUERY_RETRY_TEST_CONTEXT_KEY = "query-retry-test";
+  public static final String QUERY_TIMEOUT_TEST_CONTEXT_KEY = 
"query-timeout-test";
+  public static final String QUERY_CAPACITY_EXCEEDED_TEST_CONTEXT_KEY = 
"query-capacity-exceeded-test";
+  public static final String QUERY_UNSUPPORTED_TEST_CONTEXT_KEY = 
"query-unsupported-test";
+  public static final String RESOURCE_LIMIT_EXCEEDED_TEST_CONTEXT_KEY = 
"resource-limit-exceeded-test";
+  public static final String QUERY_FAILURE_TEST_CONTEXT_KEY = 
"query-failure-test";
+
+  private static final Logger LOG = new 
Logger(ServerManagerForQueryErrorTest.class);
+  private static final int MAX_NUM_FALSE_MISSING_SEGMENTS_REPORTS = 3;
+
+  private final ConcurrentHashMap<String, Set<SegmentDescriptor>> 
queryToIgnoredSegments = new ConcurrentHashMap<>();
+
+  @Inject
+  public ServerManagerForQueryErrorTest(
+      QueryRunnerFactoryConglomerate conglomerate,
+      ServiceEmitter emitter,
+      QueryProcessingPool queryProcessingPool,
+      CachePopulator cachePopulator,
+      @Smile ObjectMapper objectMapper,
+      Cache cache,
+      CacheConfig cacheConfig,
+      SegmentManager segmentManager,
+      JoinableFactoryWrapper joinableFactoryWrapper,
+      ServerConfig serverConfig
+  )
+  {
+    super(
+        conglomerate,
+        emitter,
+        queryProcessingPool,
+        cachePopulator,
+        objectMapper,
+        cache,
+        cacheConfig,
+        segmentManager,
+        joinableFactoryWrapper,
+        serverConfig
+    );
+  }
+
+  @Override
+  protected <T> QueryRunner<T> buildQueryRunnerForSegment(
+      Query<T> query,
+      SegmentDescriptor descriptor,
+      QueryRunnerFactory<T, Query<T>> factory,
+      QueryToolChest<T, Query<T>> toolChest,
+      VersionedIntervalTimeline<String, ReferenceCountingSegment> timeline,
+      Function<SegmentReference, SegmentReference> segmentMapFn,
+      AtomicLong cpuTimeAccumulator,
+      Optional<byte[]> cacheKeyPrefix
+  )
+  {
+    if (query.getContextBoolean(QUERY_RETRY_TEST_CONTEXT_KEY, false)) {
+      final MutableBoolean isIgnoreSegment = new MutableBoolean(false);
+      queryToIgnoredSegments.compute(
+          query.getMostSpecificId(),
+          (queryId, ignoredSegments) -> {
+            if (ignoredSegments == null) {
+              ignoredSegments = new HashSet<>();
+            }
+            if (ignoredSegments.size() < 
MAX_NUM_FALSE_MISSING_SEGMENTS_REPORTS) {
+              ignoredSegments.add(descriptor);
+              isIgnoreSegment.setTrue();
+            }
+            return ignoredSegments;
+          }
+      );
+
+      if (isIgnoreSegment.isTrue()) {
+        LOG.info("Pretending I don't have segment [%s]", descriptor);
+        return new ReportTimelineMissingSegmentQueryRunner<>(descriptor);
+      }
+    } else if (query.getContextBoolean(QUERY_TIMEOUT_TEST_CONTEXT_KEY, false)) 
{
+      return (queryPlus, responseContext) -> new Sequence<T>()
+      {
+        @Override
+        public <OutType> OutType accumulate(OutType initValue, 
Accumulator<OutType, T> accumulator)
+        {
+          throw new QueryTimeoutException("query timeout test");
+        }
+
+        @Override
+        public <OutType> Yielder<OutType> toYielder(OutType initValue, 
YieldingAccumulator<OutType, T> accumulator)
+        {
+          throw new QueryTimeoutException("query timeout test");
+        }
+      };
+    } else if 
(query.getContextBoolean(QUERY_CAPACITY_EXCEEDED_TEST_CONTEXT_KEY, false)) {
+      return (queryPlus, responseContext) -> new Sequence<T>()
+      {
+        @Override
+        public <OutType> OutType accumulate(OutType initValue, 
Accumulator<OutType, T> accumulator)
+        {
+          throw 
QueryCapacityExceededException.withErrorMessageAndResolvedHost("query capacity 
exceeded test");
+        }
+
+        @Override
+        public <OutType> Yielder<OutType> toYielder(OutType initValue, 
YieldingAccumulator<OutType, T> accumulator)
+        {
+          throw 
QueryCapacityExceededException.withErrorMessageAndResolvedHost("query capacity 
exceeded test");
+        }
+      };
+    } else if (query.getContextBoolean(QUERY_UNSUPPORTED_TEST_CONTEXT_KEY, 
false)) {
+      return (queryPlus, responseContext) -> new Sequence<T>()
+      {
+        @Override
+        public <OutType> OutType accumulate(OutType initValue, 
Accumulator<OutType, T> accumulator)
+        {
+          throw new QueryUnsupportedException("query unsupported test");
+        }
+
+        @Override
+        public <OutType> Yielder<OutType> toYielder(OutType initValue, 
YieldingAccumulator<OutType, T> accumulator)
+        {
+          throw new QueryUnsupportedException("query unsupported test");
+        }
+      };
+    } else if 
(query.getContextBoolean(RESOURCE_LIMIT_EXCEEDED_TEST_CONTEXT_KEY, false)) {
+      return (queryPlus, responseContext) -> new Sequence<T>()
+      {
+        @Override
+        public <OutType> OutType accumulate(OutType initValue, 
Accumulator<OutType, T> accumulator)
+        {
+          throw new ResourceLimitExceededException("resource limit exceeded 
test");
+        }
+
+        @Override
+        public <OutType> Yielder<OutType> toYielder(OutType initValue, 
YieldingAccumulator<OutType, T> accumulator)
+        {
+          throw new ResourceLimitExceededException("resource limit exceeded 
test");
+        }
+      };
+    } else if (query.getContextBoolean(QUERY_FAILURE_TEST_CONTEXT_KEY, false)) 
{
+      return (queryPlus, responseContext) -> new Sequence<T>()
+      {
+        @Override
+        public <OutType> OutType accumulate(OutType initValue, 
Accumulator<OutType, T> accumulator)
+        {
+          throw new RuntimeException("query failure test");
+        }
+
+        @Override
+        public <OutType> Yielder<OutType> toYielder(OutType initValue, 
YieldingAccumulator<OutType, T> accumulator)
+        {
+          throw new RuntimeException("query failure test");
+        }
+      };
+    }
+
+    return super.buildQueryRunnerForSegment(
+        query,
+        descriptor,
+        factory,
+        toolChest,
+        timeline,
+        segmentMapFn,
+        cpuTimeAccumulator,
+        cacheKeyPrefix
+    );
+  }
+}
diff --git 
a/integration-tests-ex/it-tools/src/main/java/org/apache/druid/testing/tools/SleepExprMacro.java
 
b/integration-tests-ex/it-tools/src/main/java/org/apache/druid/testing/tools/SleepExprMacro.java
new file mode 100644
index 0000000000..d278f6cda3
--- /dev/null
+++ 
b/integration-tests-ex/it-tools/src/main/java/org/apache/druid/testing/tools/SleepExprMacro.java
@@ -0,0 +1,110 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.testing.tools;
+
+import org.apache.druid.java.util.common.IAE;
+import org.apache.druid.math.expr.Expr;
+import org.apache.druid.math.expr.ExprEval;
+import 
org.apache.druid.math.expr.ExprMacroTable.BaseScalarUnivariateMacroFunctionExpr;
+import org.apache.druid.math.expr.ExprMacroTable.ExprMacro;
+import org.apache.druid.math.expr.ExpressionType;
+import org.apache.druid.query.expression.ExprUtils;
+
+import java.util.List;
+
+/**
+ * This function makes the current thread sleep for the given amount of 
seconds.
+ * Fractional-second delays can be specified.
+ *
+ * This function is applied per row. The actual query time can vary depending 
on how much parallelism is used
+ * for the query. As it does not provide consistent sleep time, this function 
should be used only for testing
+ * when you want to keep a certain query running during the test.
+ */
+public class SleepExprMacro implements ExprMacro
+{
+  private static final String NAME = "sleep";
+
+  @Override
+  public String name()
+  {
+    return NAME;
+  }
+
+  @Override
+  public Expr apply(List<Expr> args)
+  {
+    if (args.size() != 1) {
+      throw new IAE(ExprUtils.createErrMsg(name(), "must have 1 argument"));
+    }
+
+    Expr arg = args.get(0);
+
+    class SleepExpr extends BaseScalarUnivariateMacroFunctionExpr
+    {
+      public SleepExpr(Expr arg)
+      {
+        super(NAME, arg);
+      }
+
+      @Override
+      public ExprEval eval(ObjectBinding bindings)
+      {
+        ExprEval eval = arg.eval(bindings);
+        try {
+          if (!eval.isNumericNull()) {
+            double seconds = eval.asDouble(); // double to support 
fractional-second.
+            if (seconds > 0) {
+              Thread.sleep((long) (seconds * 1000));
+            }
+          }
+          return ExprEval.of(null);
+        }
+        catch (InterruptedException e) {
+          Thread.currentThread().interrupt();
+          throw new RuntimeException(e);
+        }
+      }
+
+      @Override
+      public Expr visit(Shuttle shuttle)
+      {
+        return shuttle.visit(apply(shuttle.visitAll(args)));
+      }
+
+      /**
+       * Explicitly override this method to not vectorize the sleep expression.
+       * If we ever want to vectorize this expression, {@link #getOutputType} 
should be considered to return something
+       * else than just null.
+       */
+      @Override
+      public boolean canVectorize(InputBindingInspector inspector)
+      {
+        return false;
+      }
+
+      @Override
+      public ExpressionType getOutputType(InputBindingInspector inspector)
+      {
+        return null;
+      }
+    }
+    return new SleepExpr(arg);
+  }
+}
diff --git 
a/integration-tests-ex/it-tools/src/main/java/org/apache/druid/testing/tools/SleepModule.java
 
b/integration-tests-ex/it-tools/src/main/java/org/apache/druid/testing/tools/SleepModule.java
new file mode 100644
index 0000000000..a8028f6920
--- /dev/null
+++ 
b/integration-tests-ex/it-tools/src/main/java/org/apache/druid/testing/tools/SleepModule.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.testing.tools;
+
+import com.fasterxml.jackson.databind.Module;
+import com.google.inject.Binder;
+import org.apache.druid.guice.ExpressionModule;
+import org.apache.druid.initialization.DruidModule;
+import org.apache.druid.sql.guice.SqlBindings;
+
+import java.util.Collections;
+import java.util.List;
+
+public class SleepModule implements DruidModule
+{
+  @Override
+  public List<? extends Module> getJacksonModules()
+  {
+    return Collections.emptyList();
+  }
+
+  @Override
+  public void configure(Binder binder)
+  {
+    SqlBindings.addOperatorConversion(binder, SleepOperatorConversion.class);
+    ExpressionModule.addExprMacro(binder, SleepExprMacro.class);
+  }
+}
diff --git 
a/integration-tests-ex/it-tools/src/main/java/org/apache/druid/testing/tools/SleepOperatorConversion.java
 
b/integration-tests-ex/it-tools/src/main/java/org/apache/druid/testing/tools/SleepOperatorConversion.java
new file mode 100644
index 0000000000..cae087cf71
--- /dev/null
+++ 
b/integration-tests-ex/it-tools/src/main/java/org/apache/druid/testing/tools/SleepOperatorConversion.java
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.testing.tools;
+
+import org.apache.calcite.rex.RexNode;
+import org.apache.calcite.sql.SqlFunction;
+import org.apache.calcite.sql.SqlFunctionCategory;
+import org.apache.calcite.sql.SqlOperator;
+import org.apache.calcite.sql.type.SqlTypeFamily;
+import org.apache.calcite.sql.type.SqlTypeName;
+import org.apache.druid.segment.column.RowSignature;
+import org.apache.druid.sql.calcite.expression.DruidExpression;
+import org.apache.druid.sql.calcite.expression.OperatorConversions;
+import org.apache.druid.sql.calcite.expression.SqlOperatorConversion;
+import org.apache.druid.sql.calcite.planner.PlannerContext;
+
+import javax.annotation.Nullable;
+
+/**
+ * A SQL operator conversion for the {@link SleepExprMacro} expression.
+ * The expression is evaluated during the query planning when the given 
argument is a number literal.
+ */
+public class SleepOperatorConversion implements SqlOperatorConversion
+{
+  private static final SqlFunction SQL_FUNCTION = OperatorConversions
+      .operatorBuilder("SLEEP")
+      .operandTypes(SqlTypeFamily.NUMERIC)
+      .requiredOperands(1)
+      .returnTypeNullable(SqlTypeName.VARCHAR) // always null
+      .functionCategory(SqlFunctionCategory.TIMEDATE)
+      .build();
+
+  @Override
+  public SqlOperator calciteOperator()
+  {
+    return SQL_FUNCTION;
+  }
+
+  @Nullable
+  @Override
+  public DruidExpression toDruidExpression(PlannerContext plannerContext, 
RowSignature rowSignature, RexNode rexNode)
+  {
+    return OperatorConversions.convertDirectCall(plannerContext, rowSignature, 
rexNode, "sleep");
+  }
+}
diff --git 
a/integration-tests-ex/it-tools/src/main/java/org/apache/druid/testing/tools/package.java
 
b/integration-tests-ex/it-tools/src/main/java/org/apache/druid/testing/tools/package.java
new file mode 100644
index 0000000000..74930f7abb
--- /dev/null
+++ 
b/integration-tests-ex/it-tools/src/main/java/org/apache/druid/testing/tools/package.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+/**
+ * Code loaded into a test Druid server for integration testing.
+ *
+ * Code here is excluded from Jacoco inspections.
+ */
+package org.apache.druid.testing.tools;
diff --git 
a/integration-tests-ex/it-tools/src/main/resources/META-INF/services/org.apache.druid.cli.CliCommandCreator
 
b/integration-tests-ex/it-tools/src/main/resources/META-INF/services/org.apache.druid.cli.CliCommandCreator
new file mode 100644
index 0000000000..8a82b0a348
--- /dev/null
+++ 
b/integration-tests-ex/it-tools/src/main/resources/META-INF/services/org.apache.druid.cli.CliCommandCreator
@@ -0,0 +1,17 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+org.apache.druid.testing.tools.QueryRetryTestCommandCreator
+org.apache.druid.testing.tools.CustomNodeRoleCommandCreator
diff --git 
a/integration-tests-ex/it-tools/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule
 
b/integration-tests-ex/it-tools/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule
new file mode 100644
index 0000000000..93b2c2c7f8
--- /dev/null
+++ 
b/integration-tests-ex/it-tools/src/main/resources/META-INF/services/org.apache.druid.initialization.DruidModule
@@ -0,0 +1,17 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+org.apache.druid.testing.tools.SleepModule
+org.apache.druid.testing.tools.CustomNodeRoleClientModule
diff --git a/pom.xml b/pom.xml
index 0466d8616c..5411b0b19e 100644
--- a/pom.xml
+++ b/pom.xml
@@ -212,6 +212,9 @@
         <module>extensions-contrib/opentelemetry-emitter</module>
         <!-- distribution packaging -->
         <module>distribution</module>
+        <!-- Revised integration tests -->
+        <module>integration-tests-ex/it-tools</module>
+        <module>integration-tests-ex/it-image</module>
     </modules>
 
     <repositories>
@@ -1870,6 +1873,7 @@
                                 <exclude>target/**</exclude>
                                 <exclude>licenses/**</exclude>
                                 <exclude>**/test/resources/**</exclude> <!-- 
test data for "old" ITs. -->
+                                <exclude>**/data/data/**</exclude> <!-- test 
data for "new" ITs. -->
                                 <exclude>**/derby.log</exclude>
                                 <exclude>**/jvm.config</exclude>
                                 <exclude>**/*.avsc</exclude>


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to