This is an automated email from the ASF dual-hosted git repository.

abhishekrb pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/druid.git


The following commit(s) were added to refs/heads/master by this push:
     new 9cfc71588df Migrate flaky ITNestedQueryPushDownTest to embedded test 
(#18807)
9cfc71588df is described below

commit 9cfc71588df0bdaeca05d4ef819740ec2bcd462a
Author: Abhishek Radhakrishnan <[email protected]>
AuthorDate: Thu Dec 4 11:37:53 2025 -0800

    Migrate flaky ITNestedQueryPushDownTest to embedded test (#18807)
    
    * Add embedded test ForcePushdownNestedQuery (to replace the existing flaky 
IT).
    
    The ITNestedQueryPushDownTest has recently been very flaky.
    So this patch migrates it to the Embedded test framework and hopefully
    should resolve any flakiness in this IT for good.
    
    * Remove references to the old IT: ITNestedQueryPushDownTest
    
    * Original native queries for completeness.
    
    * Review suggestion: break down into native and sql queries each
---
 .github/workflows/standard-its.yml                 |  60 +--
 .../query/ForcePushDownNestedQueryTest.java        | 422 +++++++++++++++++++++
 .../testing/embedded/query/QueryTestBase.java      |   2 +-
 integration-tests/k8s_run_config_file.json         |  16 -
 .../tests/indexer/ITNestedQueryPushDownTest.java   | 111 ------
 .../queries/nestedquerypushdown_queries.json       | 303 ---------------
 6 files changed, 424 insertions(+), 490 deletions(-)

diff --git a/.github/workflows/standard-its.yml 
b/.github/workflows/standard-its.yml
index 16e57e60aa6..1af7acd9c8e 100644
--- a/.github/workflows/standard-its.yml
+++ b/.github/workflows/standard-its.yml
@@ -112,62 +112,4 @@ jobs:
       testing_groups: -Dgroups=custom-coordinator-duties
       use_indexer: middleManager
       override_config_path: 
./environment-configs/test-groups/custom-coordinator-duties
-      group: custom coordinator duties
-
-  integration-k8s-leadership-tests:
-    needs: changes
-    name: (Compile=openjdk17, Run=openjdk17, Cluster Build On K8s) 
ITNestedQueryPushDownTest integration test
-    if: ${{ needs.changes.outputs.core == 'true' || 
needs.changes.outputs.common-extensions == 'true' }}
-    runs-on: ubuntu-22.04
-    env:
-      MVN: mvn --no-snapshot-updates
-      MAVEN_SKIP: -P skip-static-checks -Dweb.console.skip=true 
-Dmaven.javadoc.skip=true
-      CONFIG_FILE: k8s_run_config_file.json
-      IT_TEST: -Dit.test=ITNestedQueryPushDownTest
-      POD_NAME: int-test
-      POD_NAMESPACE: default
-      BUILD_DRUID_CLUSTER: true
-    steps:
-      - name: Checkout branch
-        uses: actions/checkout@v4
-
-      - name: setup java
-        uses: actions/setup-java@v4
-        with:
-          java-version: '17'
-          distribution: 'zulu'
-
-      # the build step produces SNAPSHOT artifacts into the local maven 
repository,
-      # we include github.sha in the cache key to make it specific to that 
build/jdk
-      - name: Restore Maven repository
-        id: maven-restore
-        uses: actions/cache/restore@v4
-        with:
-          path: ~/.m2/repository
-          key: maven-${{ runner.os }}-17-${{ github.sha }}
-          restore-keys: setup-java-Linux-maven-${{ hashFiles('**/pom.xml') }}
-
-      - name: Maven build
-        if: steps.maven-restore.outputs.cache-hit != 'true'
-        run: |
-          ./it.sh ci
-
-      - name: Run IT
-        id: test
-        timeout-minutes: 90
-        run: |
-          set -x
-          mvn -B -ff install -pl '!web-console' -Pdist,bundle-contrib-exts 
-Pskip-static-checks,skip-tests -Dmaven.javadoc.skip=true -T1C
-          # Note: The above command relies on the correct version of the JARs 
being installed in the local m2 repository.
-          # For any changes, please rebuild it using the command from the 
previous step (./it.sh ci).
-
-          MAVEN_OPTS='-Xmx2048m' ${MVN} verify -pl integration-tests -P 
int-tests-config-file ${IT_TEST} ${MAVEN_SKIP} -Dpod.name=${POD_NAME} 
-Dpod.namespace=${POD_NAMESPACE} -Dbuild.druid.cluster=${BUILD_DRUID_CLUSTER}
-
-      - name: Debug on failure
-        if: ${{ failure() && steps.test.conclusion == 'failure' }}
-        run: |
-          for v in broker middlemanager router coordinator historical ; do
-          echo 
"------------------------druid-tiny-cluster-"$v"s-0-------------------------";
-          /usr/local/bin/kubectl logs --tail 1000 druid-tiny-cluster-"$v"s-0 
||:;
-          /usr/local/bin/kubectl get events | grep druid-tiny-cluster-"$v"s-0 
||:;
-          done
+      group: custom coordinator duties
\ No newline at end of file
diff --git 
a/embedded-tests/src/test/java/org/apache/druid/testing/embedded/query/ForcePushDownNestedQueryTest.java
 
b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/query/ForcePushDownNestedQueryTest.java
new file mode 100644
index 00000000000..b921d1c75ac
--- /dev/null
+++ 
b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/query/ForcePushDownNestedQueryTest.java
@@ -0,0 +1,422 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.testing.embedded.query;
+
+import com.fasterxml.jackson.core.type.TypeReference;
+import org.apache.druid.data.input.impl.LocalInputSource;
+import org.apache.druid.indexer.TaskState;
+import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.java.util.common.granularity.Granularities;
+import org.apache.druid.java.util.common.jackson.JacksonUtils;
+import org.apache.druid.msq.indexing.report.MSQTaskReportPayload;
+import org.apache.druid.query.DruidProcessingConfigTest;
+import org.apache.druid.query.Query;
+import org.apache.druid.query.aggregation.LongSumAggregatorFactory;
+import org.apache.druid.query.dimension.DefaultDimensionSpec;
+import org.apache.druid.query.filter.AndDimFilter;
+import org.apache.druid.query.filter.OrDimFilter;
+import org.apache.druid.query.filter.SelectorDimFilter;
+import org.apache.druid.query.groupby.GroupByQuery;
+import org.apache.druid.query.groupby.GroupByQueryConfig;
+import org.apache.druid.query.groupby.having.GreaterThanHavingSpec;
+import org.apache.druid.query.groupby.having.OrHavingSpec;
+import org.apache.druid.segment.TestHelper;
+import org.apache.druid.sql.calcite.planner.Calcites;
+import org.apache.druid.testing.embedded.EmbeddedClusterApis;
+import org.apache.druid.testing.embedded.msq.EmbeddedMSQApis;
+import org.junit.jupiter.api.Assertions;
+import org.junit.jupiter.api.Disabled;
+import org.junit.jupiter.api.Test;
+import org.testcontainers.shaded.com.google.common.io.ByteStreams;
+
+import java.io.File;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Embedded test that verifies nested group by native and SQL queries
+ * when using the {@link 
GroupByQueryConfig#CTX_KEY_FORCE_PUSH_DOWN_NESTED_QUERY} context.
+ */
+public class ForcePushDownNestedQueryTest extends QueryTestBase
+{
+  private final String interval = "2015-09-12/2015-09-13";
+  private final Map<String, Object> forcePushDownNestedContext = 
Map.of("forcePushDownNestedQuery", "true");
+
+  @Override
+  public void beforeAll() throws IOException
+  {
+    dataSource = EmbeddedClusterApis.createTestDatasourceName();
+    loadWikipediaTable();
+  }
+
+  @Override
+  protected void refreshDatasourceName()
+  {
+    // don't change the datasource name for each run because we set things up 
before all tests
+  }
+
+  @Test
+  public void test_native_forcePushDownNestedQueryWithMultipleAggregators()
+  {
+    verifyQuery(
+        GroupByQuery
+            .builder()
+            .setDataSource(
+                GroupByQuery
+                    .builder()
+                    .setDataSource(dataSource)
+                    .setInterval(interval)
+                    .setDimensions(
+                        new DefaultDimensionSpec("channel", null),
+                        new DefaultDimensionSpec("user", null)
+                    )
+                    .setAggregatorSpecs(new 
LongSumAggregatorFactory("sumAdded", "added"))
+                    .setGranularity(Granularities.ALL)
+                    .setContext(Map.of("forcePushDownNestedQuery", "true"))
+                    .build()
+            )
+            .setInterval(interval)
+            .setAggregatorSpecs(new 
LongSumAggregatorFactory("groupedSumAdded", "sumAdded"))
+            .setGranularity(Granularities.ALL)
+            .setContext(forcePushDownNestedContext)
+            .build(),
+        List.of(
+            Map.of(
+                "version", "v1",
+                "timestamp", "2015-09-12T00:00:00.000Z",
+                "event", Map.of("groupedSumAdded", 9385573)
+            )
+        )
+    );
+  }
+
+  @Test
+  public void test_native_forcePushDownNestedQueryWithAliasDimensions()
+  {
+    verifyQuery(
+        GroupByQuery
+            .builder()
+            .setDataSource(
+                GroupByQuery
+                    .builder()
+                    .setDataSource(dataSource)
+                    .setInterval(interval)
+                    .setDimensions(
+                        new DefaultDimensionSpec("channel", "renamedChannel"),
+                        new DefaultDimensionSpec("user", "renamedUser")
+                    )
+                    .setAggregatorSpecs(new 
LongSumAggregatorFactory("sumAdded", "added"))
+                    .setGranularity(Granularities.ALL)
+                    .setContext(forcePushDownNestedContext)
+                    .build()
+            )
+            .setInterval(interval)
+            .setAggregatorSpecs(new 
LongSumAggregatorFactory("groupedSumAdded", "sumAdded"))
+            .setGranularity(Granularities.ALL)
+            .setContext(forcePushDownNestedContext)
+            .build(),
+        List.of(
+            Map.of(
+                "version", "v1",
+                "timestamp", "2015-09-12T00:00:00.000Z",
+                "event", Map.of("groupedSumAdded", 9385573)
+            )
+        )
+    );
+  }
+
+  @Test
+  public void 
test_native_forcePushDownNestedQueryWithFiltersInInnerAndOuterQueries()
+  {
+    verifyQuery(
+        GroupByQuery
+            .builder()
+            .setDataSource(
+                GroupByQuery
+                    .builder()
+                    .setDataSource(dataSource)
+                    .setInterval(interval)
+                    .setDimensions(
+                        new DefaultDimensionSpec("channel", "renamedChannel"),
+                        new DefaultDimensionSpec("user", "renamedUser")
+                    )
+                    .setAggregatorSpecs(new 
LongSumAggregatorFactory("sumAdded", "added"))
+                    .setDimFilter(
+                        new OrDimFilter(
+                            List.of(
+                                new SelectorDimFilter("channel", 
"#zh.wikipedia", null),
+                                new SelectorDimFilter("channel", 
"#es.wikipedia", null)
+                            )
+                        )
+                    )
+                    .setGranularity(Granularities.ALL)
+                    .setContext(forcePushDownNestedContext)
+                    .build()
+            )
+            .setInterval(interval)
+            .setAggregatorSpecs(new 
LongSumAggregatorFactory("groupedSumAdded", "sumAdded"))
+            .setDimFilter(new AndDimFilter(List.of(new 
SelectorDimFilter("renamedChannel", "#zh.wikipedia", null))))
+            .setGranularity(Granularities.ALL)
+            .setContext(forcePushDownNestedContext)
+            .build(),
+        List.of(
+            Map.of(
+                "version", "v1",
+                "timestamp", "2015-09-12T00:00:00.000Z",
+                "event", Map.of("groupedSumAdded", 191033)
+            )
+        )
+    );
+  }
+
+  @Test
+  public void test_native_forcePushDownNestedQueryWithHavingClause()
+  {
+    verifyQuery(
+        GroupByQuery
+            .builder()
+            .setDataSource(
+                GroupByQuery
+                    .builder()
+                    .setDataSource(dataSource)
+                    .setInterval(interval)
+                    .setDimensions(
+                        new DefaultDimensionSpec("channel", null),
+                        new DefaultDimensionSpec("user", null)
+                    )
+                    .setAggregatorSpecs(new 
LongSumAggregatorFactory("sumAdded", "added"))
+                    .setGranularity(Granularities.ALL)
+                    .setContext(forcePushDownNestedContext)
+                    .build()
+            )
+            .setInterval(interval)
+            .setAggregatorSpecs(new LongSumAggregatorFactory("outerSum", 
"sumAdded"))
+            .setHavingSpec(new OrHavingSpec(List.of(new 
GreaterThanHavingSpec("outerSum", 9_385_570))))
+            .setGranularity(Granularities.ALL)
+            .setContext(forcePushDownNestedContext)
+            .build(),
+        List.of(
+            Map.of(
+                "version", "v1",
+                "timestamp", "2015-09-12T00:00:00.000Z",
+                "event", Map.of("outerSum", 9_385_573)
+            )
+        )
+    );
+  }
+
+  @Test
+  public void test_native_forcePushDownNestedQueryWithHavingClause2()
+  {
+    verifyQuery(
+        GroupByQuery
+            .builder()
+            .setDataSource(
+                GroupByQuery
+                    .builder()
+                    .setDataSource(dataSource)
+                    .setInterval(interval)
+                    .setDimensions(
+                        new DefaultDimensionSpec("channel", null),
+                        new DefaultDimensionSpec("user", null)
+                    )
+                    .setAggregatorSpecs(new 
LongSumAggregatorFactory("sumAdded", "added"))
+                    .setGranularity(Granularities.ALL)
+                    .setContext(forcePushDownNestedContext)
+                    .build()
+            )
+            .setInterval(interval)
+            .setAggregatorSpecs(new LongSumAggregatorFactory("outerSum", 
"sumAdded"))
+            .setHavingSpec(new OrHavingSpec(List.of(new 
GreaterThanHavingSpec("outerSum", 100_000_000))))
+            .setGranularity(Granularities.ALL)
+            .setContext(forcePushDownNestedContext)
+            .build(),
+        List.of()
+    );
+  }
+
+  @Test
+  public void test_sql_forcePushDownNestedQueryWithMultipleAggregators()
+  {
+    cluster.callApi().verifySqlQuery(
+        "SET forcePushDownNestedQuery = TRUE;\n"
+        + "SELECT SUM(sumAdded) AS \"groupedSumAdded\" FROM (\n"
+        + "  SELECT channel, \"user\", SUM(added) AS sumAdded\n"
+        + "  FROM %s\n"
+        + "  GROUP BY channel, \"user\"\n"
+        + ")",
+        dataSource,
+        "9385573"
+    );
+  }
+
+  @Test
+  public void test_sql_forcePushDownNestedQueryWithAliasDimensions()
+  {
+    cluster.callApi().verifySqlQuery(
+        "SET forcePushDownNestedQuery = TRUE;\n"
+        + "SELECT SUM(sumAdded) AS groupedSumAdded FROM (\n"
+        + "  SELECT channel AS renamedChannel, \"user\" AS renamedUser, 
SUM(added) AS sumAdded\n"
+        + "  FROM %s\n"
+        + "  GROUP BY channel, \"user\"\n"
+        + ") inner_q",
+        dataSource,
+        "9385573"
+    );
+  }
+
+  /**
+   * Same as {@link 
#test_sql_forcePushDownNestedQuery_doesNotReturnAdditionalResults},
+   * but with forcePushDownNestedQuery set to false.
+   */
+  @Test
+  public void test_sql_forcePushDownNestedDisabledWithFilters()
+  {
+    cluster.callApi().verifySqlQuery(
+        "SET forcePushDownNestedQuery = FALSE;\n"
+        + "SELECT renamedChannel, SUM(sumAdded) AS \"groupedSumAdded\"\n"
+        + "FROM (\n"
+        + "  SELECT channel AS renamedChannel, \"user\", SUM(added) AS 
sumAdded\n"
+        + "  FROM %s\n"
+        + "  WHERE channel IN ('#zh.wikipedia', '#es.wikipedia')\n"
+        + "  GROUP BY channel, \"user\"\n"
+        + ") inner_q\n"
+        + "WHERE renamedChannel = '#zh.wikipedia'\n"
+        + "GROUP BY renamedChannel",
+        dataSource,
+        "#zh.wikipedia,191033"
+    );
+  }
+
+  @Test
+  public void test_sql_forcePushDownNestedQueryWithHavingClause()
+  {
+    cluster.callApi().verifySqlQuery(
+        "SET forcePushDownNestedQuery = TRUE;\n"
+        + "SELECT SUM(sumAdded) AS outerSum\n"
+        + "FROM (\n"
+        + "  SELECT channel, \"user\", SUM(added) AS sumAdded\n"
+        + "  FROM %s\n"
+        + "  GROUP BY channel, \"user\"\n"
+        + ") inner_q\n"
+        + "HAVING SUM(sumAdded) > 9385570",
+        dataSource,
+        "9385573"
+    );
+  }
+
+  @Test
+  public void test_sql_forcePushDownNestedQueryWithHavingClause2()
+  {
+    cluster.callApi().verifySqlQuery(
+        "SET forcePushDownNestedQuery = TRUE;\n"
+        + "SELECT SUM(sumAdded) FROM (\n"
+        + "  SELECT channel, \"user\", SUM(added) AS sumAdded\n"
+        + "  FROM %s GROUP BY channel, \"user\"\n"
+        + ") inner_q"
+        + " HAVING SUM(sumAdded) > 100000000",
+        dataSource,
+        ""
+    );
+  }
+
+  @Disabled("Setting forcePushDownNestedQuery = TRUE with filters returns 
additional results, which appears to be a bug"
+            + " in the SQL layer. The same query with forcePushDownNestedQuery 
= FALSE works as expected in 
test_sql_forcePushDownNestedDisabledWithFilters()")
+  @Test
+  public void 
test_sql_forcePushDownNestedQuery_doesNotReturnAdditionalResults()
+  {
+    // When forcePushDownNestedQuery is set to TRUE, this test will fail as 
there's an extra row:
+    // #es.wikipedia,634670\n#zh.wikipedia,191033
+    cluster.callApi().verifySqlQuery(
+        "SET forcePushDownNestedQuery = TRUE;\n"
+        + "SELECT renamedChannel, SUM(sumAdded) AS groupedSumAdded\n"
+        + "FROM (\n"
+        + "  SELECT channel AS renamedChannel, \"user\", SUM(added) AS 
sumAdded\n"
+        + "  FROM %s\n"
+        + "  WHERE channel IN ('#zh.wikipedia', '#es.wikipedia')\n"
+        + "  GROUP BY channel, \"user\"\n"
+        + ") inner_q\n"
+        + "WHERE renamedChannel = '#zh.wikipedia'\n"
+        + "GROUP BY renamedChannel",
+        dataSource,
+        "#zh.wikipedia,191033"
+    );
+  }
+
+  private void loadWikipediaTable() throws IOException
+  {
+    final File tmpDir = cluster.getTestFolder().newFolder();
+    final File wikiFile = new File(tmpDir, "wiki.gz");
+
+    ByteStreams.copy(
+        
DruidProcessingConfigTest.class.getResourceAsStream("/wikipedia/wikiticker-2015-09-12-sampled.json.gz"),
+        Files.newOutputStream(wikiFile.toPath())
+    );
+
+    final String sql = StringUtils.format(
+        "SET waitUntilSegmentsLoad = TRUE;\n"
+        + "REPLACE INTO \"%s\" OVERWRITE ALL\n"
+        + "SELECT\n"
+        + "  TIME_PARSE(\"time\") AS __time,\n"
+        + "  channel,\n"
+        + "  countryName,\n"
+        + "  page,\n"
+        + "  \"user\",\n"
+        + "  added,\n"
+        + "  deleted,\n"
+        + "  delta\n"
+        + "FROM TABLE(\n"
+        + "    EXTERN(\n"
+        + "      %s,\n"
+        + "      '{\"type\":\"json\"}',\n"
+        + "      
'[{\"name\":\"isRobot\",\"type\":\"string\"},{\"name\":\"channel\",\"type\":\"string\"},{\"name\":\"time\",\"type\":\"string\"},{\"name\":\"flags\",\"type\":\"string\"},{\"name\":\"isUnpatrolled\",\"type\":\"string\"},{\"name\":\"page\",\"type\":\"string\"},{\"name\":\"diffUrl\",\"type\":\"string\"},{\"name\":\"added\",\"type\":\"long\"},{\"name\":\"comment\",\"type\":\"string\"},{\"name\":\"commentLength\",\"type\":\"long\"},{\"name\":\"isNew\",\"type\":\"string\"},{\"n
 [...]
+        + "    )\n"
+        + "  )\n"
+        + "PARTITIONED BY DAY\n"
+        + "CLUSTERED BY channel",
+        dataSource,
+        Calcites.escapeStringLiteral(
+            broker.bindings()
+                  .jsonMapper()
+                  .writeValueAsString(new LocalInputSource(null, null, 
Collections.singletonList(wikiFile), null))
+        )
+    );
+
+    final MSQTaskReportPayload payload = new EmbeddedMSQApis(cluster, 
overlord).runTaskSqlAndGetReport(sql);
+    Assertions.assertEquals(TaskState.SUCCESS, 
payload.getStatus().getStatus());
+    Assertions.assertEquals(1, 
payload.getStatus().getSegmentLoadWaiterStatus().getTotalSegments());
+    Assertions.assertNull(payload.getStatus().getErrorReport());
+  }
+
+  private void verifyQuery(Query<?> query, List<Map<String, Object>> 
expectedResult)
+  {
+    final String resultAsJson = cluster.callApi().onAnyBroker(b -> 
b.submitNativeQuery(query));
+    final List<Map<String, Object>> resultList = JacksonUtils.readValue(
+        TestHelper.JSON_MAPPER,
+        resultAsJson.getBytes(StandardCharsets.UTF_8),
+        new TypeReference<>() {}
+    );
+    Assertions.assertEquals(expectedResult, resultList);
+  }
+}
diff --git 
a/embedded-tests/src/test/java/org/apache/druid/testing/embedded/query/QueryTestBase.java
 
b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/query/QueryTestBase.java
index 9469dba543c..b8e810b7ca6 100644
--- 
a/embedded-tests/src/test/java/org/apache/druid/testing/embedded/query/QueryTestBase.java
+++ 
b/embedded-tests/src/test/java/org/apache/druid/testing/embedded/query/QueryTestBase.java
@@ -63,7 +63,7 @@ public abstract class QueryTestBase extends 
EmbeddedClusterTestBase
   /**
    * Hook for the additional setup that needs to be done before all tests.
    */
-  protected void beforeAll()
+  protected void beforeAll() throws Exception
   {
    // No-op by default
   }
diff --git a/integration-tests/k8s_run_config_file.json 
b/integration-tests/k8s_run_config_file.json
deleted file mode 100644
index 249e44b19ca..00000000000
--- a/integration-tests/k8s_run_config_file.json
+++ /dev/null
@@ -1,16 +0,0 @@
-{
-  "broker_host" : "localhost",
-  "broker_port" : "30400",
-  "broker_tls_url" : "http://localhost:30100",
-  "router_host" : "localhost",
-  "router_port" : "30400",
-  "router_tls_url" : "http://localhost:30400",
-  "indexer_host" : "localhost",
-  "indexer_port" : "30400",
-  "historical_host" : "localhost",
-  "historical_port" : "30300",
-  "coordinator_host" : "localhost",
-  "coordinator_port" : "30400",
-  "middlemanager_host": "localhost",
-  "zookeeper_hosts": "localhost:30600"
-}
\ No newline at end of file
diff --git 
a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITNestedQueryPushDownTest.java
 
b/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITNestedQueryPushDownTest.java
deleted file mode 100644
index 8082a3681af..00000000000
--- 
a/integration-tests/src/test/java/org/apache/druid/tests/indexer/ITNestedQueryPushDownTest.java
+++ /dev/null
@@ -1,111 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.apache.druid.tests.indexer;
-
-import com.google.inject.Inject;
-import org.apache.commons.io.IOUtils;
-import org.apache.druid.java.util.common.ISE;
-import org.apache.druid.java.util.common.StringUtils;
-import org.apache.druid.java.util.common.logger.Logger;
-import org.apache.druid.testing.clients.ClientInfoResourceTestClient;
-import org.apache.druid.testing.clients.CoordinatorResourceTestClient;
-import org.apache.druid.testing.guice.DruidTestModuleFactory;
-import org.apache.druid.testing.tools.ITRetryUtil;
-import org.apache.druid.testing.tools.IntegrationTestingConfig;
-import org.apache.druid.testing.utils.TestQueryHelper;
-import org.apache.druid.tests.TestNGGroup;
-import org.testng.annotations.BeforeSuite;
-import org.testng.annotations.Guice;
-import org.testng.annotations.Test;
-
-import java.io.IOException;
-import java.io.InputStream;
-import java.nio.charset.StandardCharsets;
-
-@Test(groups = TestNGGroup.QUERY)
-@Guice(moduleFactory = DruidTestModuleFactory.class)
-public class ITNestedQueryPushDownTest extends AbstractIndexerTest
-{
-  private static final String WIKITICKER_DATA_SOURCE = "wikiticker";
-  private static final String WIKITICKER_INDEX_TASK = 
"/indexer/wikiticker_index_task.json";
-  private static final String WIKITICKER_QUERIES_RESOURCE = 
"/queries/nestedquerypushdown_queries.json";
-
-  @Inject
-  private CoordinatorResourceTestClient coordinatorClient;
-  @Inject
-  private TestQueryHelper queryHelper;
-
-  private static final Logger LOG = new 
Logger(ITNestedQueryPushDownTest.class);
-
-  @Inject
-  private IntegrationTestingConfig config;
-
-  @Inject
-  ClientInfoResourceTestClient clientInfoResourceTestClient;
-
-  private String fullDatasourceName;
-
-  @BeforeSuite
-  public void setFullDatasourceName()
-  {
-    fullDatasourceName = WIKITICKER_DATA_SOURCE + 
config.getExtraDatasourceNameSuffix();
-  }
-
-  @Test
-  public void testIndexData()
-  {
-    try {
-      loadData();
-
-      String queryResponseTemplate;
-      try {
-        InputStream is = 
AbstractITBatchIndexTest.class.getResourceAsStream(WIKITICKER_QUERIES_RESOURCE);
-        queryResponseTemplate = IOUtils.toString(is, StandardCharsets.UTF_8);
-      }
-      catch (IOException e) {
-        throw new ISE(e, "could not read query file: %s", 
WIKITICKER_QUERIES_RESOURCE);
-      }
-
-      queryResponseTemplate = StringUtils.replace(
-          queryResponseTemplate,
-          "%%DATASOURCE%%",
-          fullDatasourceName
-      );
-
-      queryHelper.testQueriesFromString(queryResponseTemplate);
-    }
-    catch (Exception e) {
-      LOG.error(e, "Error while testing");
-      throw new RuntimeException(e);
-    }
-  }
-
-  private void loadData() throws Exception
-  {
-    String taskSpec = getResourceAsString(WIKITICKER_INDEX_TASK);
-    taskSpec = StringUtils.replace(taskSpec, "%%DATASOURCE%%", 
fullDatasourceName);
-    final String taskID = indexer.submitTask(taskSpec);
-    LOG.info("TaskID for loading index task %s", taskID);
-    indexer.waitUntilTaskCompletes(taskID);
-    ITRetryUtil.retryUntilTrue(
-        () -> coordinator.areSegmentsLoaded(fullDatasourceName), "Segment Load"
-    );
-  }
-}
diff --git 
a/integration-tests/src/test/resources/queries/nestedquerypushdown_queries.json 
b/integration-tests/src/test/resources/queries/nestedquerypushdown_queries.json
deleted file mode 100644
index c7a062c02bb..00000000000
--- 
a/integration-tests/src/test/resources/queries/nestedquerypushdown_queries.json
+++ /dev/null
@@ -1,303 +0,0 @@
-[
-  {
-    "description": "Nested group by double agg query with force push down",
-    "query": {
-      "queryType": "groupBy",
-      "dataSource": {
-        "type": "query",
-        "query": {
-          "queryType": "groupBy",
-          "dataSource": "%%DATASOURCE%%",
-          "intervals": [
-            "2015-09-12/2015-09-13"
-          ],
-          "granularity": "all",
-          "dimensions": [
-            "channel",
-            "user"
-          ],
-          "metric": "added",
-          "aggregations": [
-            {
-              "type": "longSum",
-              "name": "sumAdded",
-              "fieldName": "added"
-            }
-          ]
-        }
-      },
-      "granularity": "all",
-      "dimension": "channel",
-      "aggregations": [
-        {
-          "type": "longSum",
-          "name": "groupedSumAdded",
-          "fieldName": "sumAdded"
-        }
-      ],
-      "intervals": [
-        "2015-09-12/2015-09-13"
-      ],
-      "context": {
-        "forcePushDownNestedQuery":"true"
-      }
-    },
-    "expectedResults": [
-      {
-        "version" : "v1",
-        "timestamp" : "2015-09-12T00:00:00.000Z",
-        "event" : {
-          "groupedSumAdded" : 9385573
-        }
-      }
-    ]
-  },
-  {
-    "description": "Nested group by query with force push down and renamed 
dimensions",
-    "query": {
-      "queryType": "groupBy",
-      "dataSource": {
-        "type": "query",
-        "query": {
-          "queryType": "groupBy",
-          "dataSource": "%%DATASOURCE%%",
-          "intervals": [
-            "2015-09-12/2015-09-13"
-          ],
-          "granularity": "all",
-          "dimensions": [
-            {"dimension" : "channel", "outputName" :"renamedChannel"},
-            {"dimension" : "user", "outputName" :"renamedUser"}
-          ],
-          "metric": "added",
-          "aggregations": [
-            {
-              "type": "longSum",
-              "name": "sumAdded",
-              "fieldName": "added"
-            }
-          ]
-        }
-      },
-      "granularity": "all",
-      "dimension": "renamedChannel",
-      "aggregations": [
-        {
-          "type": "longSum",
-          "name": "groupedSumAdded",
-          "fieldName": "sumAdded"
-        }
-      ],
-      "intervals": [
-        "2015-09-12/2015-09-13"
-      ],
-      "context": {
-        "forcePushDownNestedQuery":"true"
-      }
-    },
-    "expectedResults": [
-      {
-        "version" : "v1",
-        "timestamp" : "2015-09-12T00:00:00.000Z",
-        "event" : {
-          "groupedSumAdded" : 9385573
-        }
-      }
-    ]
-  },
-  {
-    "description": "Nested group by query with force push down and filter on 
outer and inner query",
-    "query": {
-      "queryType": "groupBy",
-      "dataSource": {
-        "type": "query",
-        "query": {
-          "queryType": "groupBy",
-          "dataSource": "%%DATASOURCE%%",
-          "intervals": [
-            "2015-09-12/2015-09-13"
-          ],
-          "granularity": "all",
-          "dimensions": [
-            {"dimension" : "channel", "outputName" :"renamedChannel"},
-            {"dimension" : "user", "outputName" :"renamedUser"}
-          ],
-          "metric": "added",
-          "aggregations": [
-            {
-              "type": "longSum",
-              "name": "sumAdded",
-              "fieldName": "added"
-            }
-          ],
-          "filter": {
-            "type": "or",
-            "fields": [
-              {
-                "type": "selector",
-                "dimension": "channel",
-                "value": "#zh.wikipedia"
-              },
-              {
-                "type": "selector",
-                "dimension": "channel",
-                "value": "#es.wikipedia"
-              }
-            ]
-          }
-        }
-      },
-      "granularity": "all",
-      "dimension": "renamedChannel",
-      "aggregations": [
-        {
-          "type": "longSum",
-          "name": "groupedSumAdded",
-          "fieldName": "sumAdded"
-        }
-      ],
-      "intervals": [
-        "2015-09-12/2015-09-13"
-      ],
-      "filter": {
-        "type": "and",
-        "fields": [
-          {
-            "type": "selector",
-            "dimension": "renamedChannel",
-            "value": "#zh.wikipedia"
-          }
-        ]
-      },
-      "context": {
-        "forcePushDownNestedQuery":"true"
-      }
-    },
-    "expectedResults": [
-      {
-        "version" : "v1",
-        "timestamp" : "2015-09-12T00:00:00.000Z",
-        "event" : {
-          "groupedSumAdded" : 191033
-        }
-      }
-    ]
-  },
-  {
-    "description": "Nested group by query with force push down and having 
clause",
-    "query": {
-      "queryType": "groupBy",
-      "dataSource": {
-        "type": "query",
-        "query": {
-          "queryType": "groupBy",
-          "dataSource": "%%DATASOURCE%%",
-          "intervals": [
-            "2015-09-12/2015-09-13"
-          ],
-          "granularity": "all",
-          "dimensions": [
-            {"dimension" : "channel"},
-            {"dimension" : "user"}
-          ],
-          "metric": "added",
-          "aggregations": [
-            {
-              "type": "longSum",
-              "name": "sumAdded",
-              "fieldName": "added"
-            }
-          ]
-        }
-      },
-      "granularity": "all",
-      "aggregations": [
-        {
-          "type": "longSum",
-          "name": "outerSum",
-          "fieldName": "sumAdded"
-        }
-      ],
-      "intervals": [
-        "2015-09-12/2015-09-13"
-      ],
-      "having": {
-        "type": "or",
-        "havingSpecs": [
-          {
-            "type": "greaterThan",
-            "aggregation": "outerSum",
-            "value": 9385570
-          }
-        ]
-      },
-      "context": {
-        "forcePushDownNestedQuery":"true"
-      }
-    },
-    "expectedResults": [
-      {
-        "version" : "v1",
-        "timestamp" : "2015-09-12T00:00:00.000Z",
-        "event" : {
-          "outerSum" : 9385573
-        }
-      }
-    ]
-  },
-  {
-    "description": "Nested group by query with force push down and having 
clause. This test asserts that the post processing was invoked.",
-    "query": {
-      "queryType": "groupBy",
-      "dataSource": {
-        "type": "query",
-        "query": {
-          "queryType": "groupBy",
-          "dataSource": "%%DATASOURCE%%",
-          "intervals": [
-            "2015-09-12/2015-09-13"
-          ],
-          "granularity": "all",
-          "dimensions": [
-            {"dimension" : "channel"},
-            {"dimension" : "user"}
-          ],
-          "metric": "added",
-          "aggregations": [
-            {
-              "type": "longSum",
-              "name": "sumAdded",
-              "fieldName": "added"
-            }
-          ]
-        }
-      },
-      "granularity": "all",
-      "aggregations": [
-        {
-          "type": "longSum",
-          "name": "outerSum",
-          "fieldName": "sumAdded"
-        }
-      ],
-      "intervals": [
-        "2015-09-12/2015-09-13"
-      ],
-      "having": {
-        "type": "or",
-        "havingSpecs": [
-          {
-            "type": "greaterThan",
-            "aggregation": "outerSum",
-            "value": 100000000
-          }
-        ]
-      },
-      "context": {
-        "forcePushDownNestedQuery":"true"
-      }
-    },
-    "expectedResults": [
-    ]
-  }
-]
\ No newline at end of file


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]


Reply via email to