This is an automated email from the ASF dual-hosted git repository.

kfaraz pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/druid.git


The following commit(s) were added to refs/heads/master by this push:
     new 326f2c5020 Add more statement attributes to explain plan result. 
(#14391)
326f2c5020 is described below

commit 326f2c5020d4a9a02128efc160b49fbc935fc6f2
Author: Abhishek Radhakrishnan <[email protected]>
AuthorDate: Mon Jun 12 06:48:02 2023 -0700

    Add more statement attributes to explain plan result. (#14391)
    
    This PR adds the following to the ATTRIBUTES column in the explain plan 
output:
    - partitionedBy
    - clusteredBy
    - replaceTimeChunks
    
    This PR leverages the work done in #14074, which added a new column 
ATTRIBUTES
    to encapsulate all the statement-related attributes.
---
 docs/querying/sql-translation.md                   | 233 ++++++++++++++++++++-
 .../sql/calcite/planner/ExplainAttributes.java     |  63 +++++-
 .../druid/sql/calcite/planner/IngestHandler.java   |  10 +-
 .../druid/sql/calcite/planner/QueryHandler.java    |   3 +
 .../druid/sql/avatica/DruidAvaticaHandlerTest.java |   2 +-
 .../druid/sql/calcite/CalciteExplainQueryTest.java |  14 +-
 .../druid/sql/calcite/CalciteInsertDmlTest.java    | 109 +++++++++-
 .../druid/sql/calcite/CalciteReplaceDmlTest.java   |  84 +++++++-
 .../druid/sql/calcite/CalciteSelectQueryTest.java  |   4 +-
 .../druid/sql/calcite/IngestTableFunctionTest.java |   2 +-
 .../sql/calcite/planner/ExplainAttributesTest.java | 130 ++++++++++++
 .../org/apache/druid/sql/http/SqlResourceTest.java |   2 +-
 12 files changed, 631 insertions(+), 25 deletions(-)

diff --git a/docs/querying/sql-translation.md b/docs/querying/sql-translation.md
index 7c2876c68d..5126b9fc35 100644
--- a/docs/querying/sql-translation.md
+++ b/docs/querying/sql-translation.md
@@ -67,9 +67,14 @@ be translated to native.
 EXPLAIN PLAN statements return:
 - a `PLAN` column that contains a JSON array of native queries that Druid will 
run
 - a `RESOURCES` column that describes the resources used in the query
-- a `ATTRIBUTES` column that describes the attributes of a query, such as the 
statement type and target data source
+- an `ATTRIBUTES` column that describes the attributes of the query, including:
+  - `statementType`: the SQL statement type
+  - `targetDataSource`: the target datasource in an INSERT or REPLACE statement
+  - `partitionedBy`: the time-based partitioning granularity in an INSERT or 
REPLACE statement
+  - `clusteredBy`: the clustering columns in an INSERT or REPLACE statement
+  - `replaceTimeChunks`: the time chunks in a REPLACE statement
 
-For example, consider the following query:
+Example 1: EXPLAIN PLAN for a `SELECT` query on the `wikipedia` datasource:
 
 ```sql
 EXPLAIN PLAN FOR
@@ -81,7 +86,7 @@ WHERE channel IN (SELECT page FROM wikipedia GROUP BY page 
ORDER BY COUNT(*) DES
 GROUP BY channel
 ```
 
-The EXPLAIN PLAN statement returns the following result with plan, resources, 
and attributes information in it:
+The above EXPLAIN PLAN query returns the following result:
 
 ```json
 [
@@ -215,8 +220,226 @@ The EXPLAIN PLAN statement returns the following result 
with plan, resources, an
     }
   ],
   {
-    "statementType": "SELECT",
-    "targetDataSource": null
+    "statementType": "SELECT"
+  }
+]
+```
+
+Example 2: EXPLAIN PLAN for a `REPLACE` query that replaces all the data in 
the `wikipedia` datasource:
+
+```sql
+EXPLAIN PLAN FOR
+REPLACE INTO wikipedia
+OVERWRITE ALL
+SELECT
+  TIME_PARSE("timestamp") AS __time,
+  namespace,
+  cityName,
+  countryName,
+  regionIsoCode,
+  metroCode,
+  countryIsoCode,
+  regionName
+FROM TABLE(
+    EXTERN(
+      
'{"type":"http","uris":["https://druid.apache.org/data/wikipedia.json.gz"]}',
+      '{"type":"json"}',
+      
'[{"name":"timestamp","type":"string"},{"name":"namespace","type":"string"},{"name":"cityName","type":"string"},{"name":"countryName","type":"string"},{"name":"regionIsoCode","type":"string"},{"name":"metroCode","type":"long"},{"name":"countryIsoCode","type":"string"},{"name":"regionName","type":"string"}]'
+    )
+  )
+PARTITIONED BY HOUR
+CLUSTERED BY cityName
+```
+
+The above EXPLAIN PLAN query returns the following result:
+
+```json
+[
+  [
+    {
+      "query": {
+        "queryType": "scan",
+        "dataSource": {
+          "type": "external",
+          "inputSource": {
+            "type": "http",
+            "uris": [
+              "https://druid.apache.org/data/wikipedia.json.gz"
+            ]
+          },
+          "inputFormat": {
+            "type": "json",
+            "keepNullColumns": false,
+            "assumeNewlineDelimited": false,
+            "useJsonNodeReader": false
+          },
+          "signature": [
+            {
+              "name": "timestamp",
+              "type": "STRING"
+            },
+            {
+              "name": "namespace",
+              "type": "STRING"
+            },
+            {
+              "name": "cityName",
+              "type": "STRING"
+            },
+            {
+              "name": "countryName",
+              "type": "STRING"
+            },
+            {
+              "name": "regionIsoCode",
+              "type": "STRING"
+            },
+            {
+              "name": "metroCode",
+              "type": "LONG"
+            },
+            {
+              "name": "countryIsoCode",
+              "type": "STRING"
+            },
+            {
+              "name": "regionName",
+              "type": "STRING"
+            }
+          ]
+        },
+        "intervals": {
+          "type": "intervals",
+          "intervals": [
+            "-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z"
+          ]
+        },
+        "virtualColumns": [
+          {
+            "type": "expression",
+            "name": "v0",
+            "expression": "timestamp_parse(\"timestamp\",null,'UTC')",
+            "outputType": "LONG"
+          }
+        ],
+        "resultFormat": "compactedList",
+        "orderBy": [
+          {
+            "columnName": "cityName",
+            "order": "ascending"
+          }
+        ],
+        "columns": [
+          "cityName",
+          "countryIsoCode",
+          "countryName",
+          "metroCode",
+          "namespace",
+          "regionIsoCode",
+          "regionName",
+          "v0"
+        ],
+        "legacy": false,
+        "context": {
+          "finalizeAggregations": false,
+          "groupByEnableMultiValueUnnesting": false,
+          "maxNumTasks": 5,
+          "queryId": "b474c0d5-a5ce-432d-be94-535ccdb7addc",
+          "scanSignature": 
"[{\"name\":\"cityName\",\"type\":\"STRING\"},{\"name\":\"countryIsoCode\",\"type\":\"STRING\"},{\"name\":\"countryName\",\"type\":\"STRING\"},{\"name\":\"metroCode\",\"type\":\"LONG\"},{\"name\":\"namespace\",\"type\":\"STRING\"},{\"name\":\"regionIsoCode\",\"type\":\"STRING\"},{\"name\":\"regionName\",\"type\":\"STRING\"},{\"name\":\"v0\",\"type\":\"LONG\"}]",
+          "sqlInsertSegmentGranularity": "\"HOUR\"",
+          "sqlQueryId": "b474c0d5-a5ce-432d-be94-535ccdb7addc",
+          "sqlReplaceTimeChunks": "all"
+        },
+        "granularity": {
+          "type": "all"
+        }
+      },
+      "signature": [
+        {
+          "name": "v0",
+          "type": "LONG"
+        },
+        {
+          "name": "namespace",
+          "type": "STRING"
+        },
+        {
+          "name": "cityName",
+          "type": "STRING"
+        },
+        {
+          "name": "countryName",
+          "type": "STRING"
+        },
+        {
+          "name": "regionIsoCode",
+          "type": "STRING"
+        },
+        {
+          "name": "metroCode",
+          "type": "LONG"
+        },
+        {
+          "name": "countryIsoCode",
+          "type": "STRING"
+        },
+        {
+          "name": "regionName",
+          "type": "STRING"
+        }
+      ],
+      "columnMappings": [
+        {
+          "queryColumn": "v0",
+          "outputColumn": "__time"
+        },
+        {
+          "queryColumn": "namespace",
+          "outputColumn": "namespace"
+        },
+        {
+          "queryColumn": "cityName",
+          "outputColumn": "cityName"
+        },
+        {
+          "queryColumn": "countryName",
+          "outputColumn": "countryName"
+        },
+        {
+          "queryColumn": "regionIsoCode",
+          "outputColumn": "regionIsoCode"
+        },
+        {
+          "queryColumn": "metroCode",
+          "outputColumn": "metroCode"
+        },
+        {
+          "queryColumn": "countryIsoCode",
+          "outputColumn": "countryIsoCode"
+        },
+        {
+          "queryColumn": "regionName",
+          "outputColumn": "regionName"
+        }
+      ]
+    }
+  ],
+  [
+    {
+      "name": "EXTERNAL",
+      "type": "EXTERNAL"
+    },
+    {
+      "name": "wikipedia",
+      "type": "DATASOURCE"
+    }
+  ],
+  {
+    "statementType": "REPLACE",
+    "targetDataSource": "wikipedia",
+    "partitionedBy": "HOUR",
+    "clusteredBy": "`cityName`",
+    "replaceTimeChunks": "'ALL'"
   }
 ]
 ```
diff --git 
a/sql/src/main/java/org/apache/druid/sql/calcite/planner/ExplainAttributes.java 
b/sql/src/main/java/org/apache/druid/sql/calcite/planner/ExplainAttributes.java
index b793fad96f..8d040f23fa 100644
--- 
a/sql/src/main/java/org/apache/druid/sql/calcite/planner/ExplainAttributes.java
+++ 
b/sql/src/main/java/org/apache/druid/sql/calcite/planner/ExplainAttributes.java
@@ -19,8 +19,11 @@
 
 package org.apache.druid.sql.calcite.planner;
 
+import com.fasterxml.jackson.annotation.JsonInclude;
 import com.fasterxml.jackson.annotation.JsonProperty;
 import org.apache.calcite.sql.SqlNode;
+import org.apache.calcite.sql.SqlNodeList;
+import org.apache.druid.java.util.common.granularity.Granularity;
 
 import javax.annotation.Nullable;
 
@@ -34,12 +37,28 @@ public final class ExplainAttributes
   @Nullable
   private final SqlNode targetDataSource;
 
+  @Nullable
+  private final Granularity partitionedBy;
+
+  @Nullable
+  private final SqlNodeList clusteredBy;
+
+  @Nullable
+  private final SqlNode replaceTimeChunks;
+
   public ExplainAttributes(
       @JsonProperty("statementType") final String statementType,
-      @JsonProperty("targetDataSource") @Nullable final SqlNode 
targetDataSource)
+      @JsonProperty("targetDataSource") @Nullable final SqlNode 
targetDataSource,
+      @JsonProperty("partitionedBy") @Nullable final Granularity partitionedBy,
+      @JsonProperty("clusteredBy") @Nullable final SqlNodeList clusteredBy,
+      @JsonProperty("replaceTimeChunks") @Nullable final SqlNode 
replaceTimeChunks
+  )
   {
     this.statementType = statementType;
     this.targetDataSource = targetDataSource;
+    this.partitionedBy = partitionedBy;
+    this.clusteredBy = clusteredBy;
+    this.replaceTimeChunks = replaceTimeChunks;
   }
 
   /**
@@ -53,21 +72,61 @@ public final class ExplainAttributes
 
   /**
    * @return the target datasource in a SQL statement. Returns null
-   * for SELECT/non-DML statements where there is no target datasource.
+   * for SELECT statements where there is no target datasource.
    */
   @Nullable
   @JsonProperty
+  @JsonInclude(JsonInclude.Include.NON_NULL)
   public String getTargetDataSource()
   {
     return targetDataSource == null ? null : targetDataSource.toString();
   }
 
+  /**
+   * @return the time-based partitioning granularity specified in the 
<code>PARTITIONED BY</code> clause
+   * for an INSERT or REPLACE statement. Returns null for SELECT statements.
+   */
+  @Nullable
+  @JsonProperty
+  @JsonInclude(JsonInclude.Include.NON_NULL)
+  public Granularity getPartitionedBy()
+  {
+    return partitionedBy;
+  }
+
+  /**
+   * @return the clustering columns specified in the <code>CLUSTERED BY</code> 
clause
+   * for an INSERT or REPLACE statement. Returns null for SELECT statements.
+   */
+  @Nullable
+  @JsonProperty
+  @JsonInclude(JsonInclude.Include.NON_NULL)
+  public String getClusteredBy()
+  {
+    return clusteredBy == null ? null : clusteredBy.toString();
+  }
+
+  /**
+   * @return the time chunks specified in the <code>OVERWRITE</code> clause
+   * for a REPLACE statement. Returns null for INSERT and SELECT statements.
+   */
+  @Nullable
+  @JsonProperty
+  @JsonInclude(JsonInclude.Include.NON_NULL)
+  public String getReplaceTimeChunks()
+  {
+    return replaceTimeChunks == null ? null : replaceTimeChunks.toString();
+  }
+
   @Override
   public String toString()
   {
     return "ExplainAttributes{" +
            "statementType='" + statementType + '\'' +
            ", targetDataSource=" + targetDataSource +
+           ", partitionedBy=" + partitionedBy +
+           ", clusteredBy=" + clusteredBy +
+           ", replaceTimeChunks=" + replaceTimeChunks +
            '}';
   }
 }
diff --git 
a/sql/src/main/java/org/apache/druid/sql/calcite/planner/IngestHandler.java 
b/sql/src/main/java/org/apache/druid/sql/calcite/planner/IngestHandler.java
index 7aab7148b4..459784eb3b 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/IngestHandler.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/IngestHandler.java
@@ -279,7 +279,10 @@ public abstract class IngestHandler extends QueryHandler
     {
       return new ExplainAttributes(
           DruidSqlInsert.OPERATOR.getName(),
-          sqlNode.getTargetTable()
+          sqlNode.getTargetTable(),
+          sqlNode.getPartitionedBy(),
+          sqlNode.getClusteredBy(),
+          null
       );
     }
   }
@@ -346,7 +349,10 @@ public abstract class IngestHandler extends QueryHandler
     {
       return new ExplainAttributes(
           DruidSqlReplace.OPERATOR.getName(),
-          sqlNode.getTargetTable()
+          sqlNode.getTargetTable(),
+          sqlNode.getPartitionedBy(),
+          sqlNode.getClusteredBy(),
+          sqlNode.getReplaceTimeQuery()
       );
     }
   }
diff --git 
a/sql/src/main/java/org/apache/druid/sql/calcite/planner/QueryHandler.java 
b/sql/src/main/java/org/apache/druid/sql/calcite/planner/QueryHandler.java
index 28e9beffc6..45b4390d0b 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/planner/QueryHandler.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/planner/QueryHandler.java
@@ -236,6 +236,9 @@ public abstract class QueryHandler extends 
SqlStatementHandler.BaseStatementHand
   {
     return new ExplainAttributes(
         "SELECT",
+        null,
+        null,
+        null,
         null
     );
   }
diff --git 
a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java 
b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java
index ccc0d71826..cd036d04c3 100644
--- 
a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java
+++ 
b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java
@@ -471,7 +471,7 @@ public class DruidAvaticaHandlerTest extends CalciteTestBase
                   "RESOURCES",
                   "[{\"name\":\"foo\",\"type\":\"DATASOURCE\"}]",
                   "ATTRIBUTES",
-                  "{\"statementType\":\"SELECT\",\"targetDataSource\":null}"
+                  "{\"statementType\":\"SELECT\"}"
               )
           ),
           getRows(resultSet)
diff --git 
a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteExplainQueryTest.java 
b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteExplainQueryTest.java
index c4f26ebab1..f819e36759 100644
--- 
a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteExplainQueryTest.java
+++ 
b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteExplainQueryTest.java
@@ -52,7 +52,7 @@ public class CalciteExplainQueryTest extends 
BaseCalciteQueryTest
                                + 
"\"columnMappings\":[{\"queryColumn\":\"a0\",\"outputColumn\":\"EXPR$0\"}]"
                                + "}]";
     final String resources = "[{\"name\":\"aview\",\"type\":\"VIEW\"}]";
-    final String attributes = 
"{\"statementType\":\"SELECT\",\"targetDataSource\":null}";
+    final String attributes = "{\"statementType\":\"SELECT\"}";
 
     testQuery(
         PLANNER_CONFIG_LEGACY_QUERY_EXPLAIN,
@@ -83,7 +83,7 @@ public class CalciteExplainQueryTest extends 
BaseCalciteQueryTest
         + "    BindableTableScan(table=[[INFORMATION_SCHEMA, COLUMNS]])\n";
 
     final String resources = "[]";
-    final String attributes = 
"{\"statementType\":\"SELECT\",\"targetDataSource\":null}";
+    final String attributes = "{\"statementType\":\"SELECT\"}";
 
     testQuery(
         "EXPLAIN PLAN FOR\n"
@@ -129,7 +129,7 @@ public class CalciteExplainQueryTest extends 
BaseCalciteQueryTest
                                + 
"\"columnMappings\":[{\"queryColumn\":\"a0\",\"outputColumn\":\"EXPR$0\"}]"
                                + "}]";
     final String resources = "[{\"name\":\"foo\",\"type\":\"DATASOURCE\"}]";
-    final String attributes = 
"{\"statementType\":\"SELECT\",\"targetDataSource\":null}";
+    final String attributes = "{\"statementType\":\"SELECT\"}";
 
     testQuery(
         query,
@@ -188,7 +188,7 @@ public class CalciteExplainQueryTest extends 
BaseCalciteQueryTest
                                     + "}]";
     String sql = "EXPLAIN PLAN FOR SELECT * FROM druid.foo";
     String resources = "[{\"name\":\"foo\",\"type\":\"DATASOURCE\"}]";
-    final String attributes = 
"{\"statementType\":\"SELECT\",\"targetDataSource\":null}";
+    final String attributes = "{\"statementType\":\"SELECT\"}";
 
     // Test when default config and no overrides
     testQuery(sql, ImmutableList.of(), ImmutableList.of(new 
Object[]{explanation, resources, attributes}));
@@ -252,7 +252,7 @@ public class CalciteExplainQueryTest extends 
BaseCalciteQueryTest
                                + 
"\"columnMappings\":[{\"queryColumn\":\"dim1\",\"outputColumn\":\"dim1\"}]"
                                + "}]";
     final String resources = "[{\"name\":\"foo\",\"type\":\"DATASOURCE\"}]";
-    final String attributes = 
"{\"statementType\":\"SELECT\",\"targetDataSource\":null}";
+    final String attributes = "{\"statementType\":\"SELECT\"}";
     testQuery(
         PLANNER_CONFIG_LEGACY_QUERY_EXPLAIN,
         query,
@@ -306,7 +306,7 @@ public class CalciteExplainQueryTest extends 
BaseCalciteQueryTest
                                                       + 
"\"columnMappings\":[{\"queryColumn\":\"v0\",\"outputColumn\":\"EXPR$0\"},{\"queryColumn\":\"v1\",\"outputColumn\":\"EXPR$1\"}]"
                                                       + "}]";
     final String expectedResources = 
"[{\"name\":\"foo\",\"type\":\"DATASOURCE\"}]";
-    final String expectedAttributes = 
"{\"statementType\":\"SELECT\",\"targetDataSource\":null}";
+    final String expectedAttributes = "{\"statementType\":\"SELECT\"}";
     testQuery(
         explainSql,
         defaultExprContext,
@@ -371,7 +371,7 @@ public class CalciteExplainQueryTest extends 
BaseCalciteQueryTest
                                 + 
"\"columnMappings\":[{\"queryColumn\":\"v0\",\"outputColumn\":\"EXPR$0\"}]"
                                 + "}]";
     final String expectedResources = 
"[{\"name\":\"foo\",\"type\":\"DATASOURCE\"}]";
-    final String expectedAttributes = 
"{\"statementType\":\"SELECT\",\"targetDataSource\":null}";
+    final String expectedAttributes = "{\"statementType\":\"SELECT\"}";
     // Verify the query plan
     testQuery(
         explainSql,
diff --git 
a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteInsertDmlTest.java 
b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteInsertDmlTest.java
index d0f06fffa6..641b0ea49b 100644
--- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteInsertDmlTest.java
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteInsertDmlTest.java
@@ -647,6 +647,110 @@ public class CalciteInsertDmlTest extends 
CalciteIngestionDmlTest
     didTest = true;
   }
 
+  @Test
+  public void testExplainPlanInsertWithClusteredBy() throws 
JsonProcessingException
+  {
+    skipVectorize();
+
+    final String resources = 
"[{\"name\":\"dst\",\"type\":\"DATASOURCE\"},{\"name\":\"foo\",\"type\":\"DATASOURCE\"}]";
+    final String attributes = 
"{\"statementType\":\"INSERT\",\"targetDataSource\":\"druid.dst\",\"partitionedBy\":\"DAY\",\"clusteredBy\":\"2,
 `dim1` DESC, CEIL(`m2`)\"}";
+
+    final String sql = "EXPLAIN PLAN FOR INSERT INTO druid.dst "
+                       + "SELECT __time, FLOOR(m1) as floor_m1, dim1, CEIL(m2) 
as ceil_m2 FROM foo "
+                       + "PARTITIONED BY FLOOR(__time TO DAY) CLUSTERED BY 2, 
dim1 DESC, CEIL(m2)";
+
+    ObjectMapper queryJsonMapper = queryFramework().queryJsonMapper();
+    final ScanQuery expectedQuery = newScanQueryBuilder()
+        .dataSource("foo")
+        .intervals(querySegmentSpec(Filtration.eternity()))
+        .columns("__time", "dim1", "v0", "v1")
+        .virtualColumns(
+            expressionVirtualColumn("v0", "floor(\"m1\")", ColumnType.FLOAT),
+            expressionVirtualColumn("v1", "ceil(\"m2\")", ColumnType.DOUBLE)
+        )
+        .orderBy(
+            ImmutableList.of(
+                new ScanQuery.OrderBy("v0", ScanQuery.Order.ASCENDING),
+                new ScanQuery.OrderBy("dim1", ScanQuery.Order.DESCENDING),
+                new ScanQuery.OrderBy("v1", ScanQuery.Order.ASCENDING)
+            )
+        )
+        .context(
+            queryJsonMapper.readValue(
+                
"{\"sqlInsertSegmentGranularity\":\"\\\"DAY\\\"\",\"sqlQueryId\":\"dummy\",\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"}",
+                JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT
+            )
+        )
+        .build();
+
+    final String legacyExplanation =
+        "DruidQueryRel(query=["
+        + queryJsonMapper.writeValueAsString(expectedQuery)
+        + "], signature=[{__time:LONG, v0:FLOAT, dim1:STRING, v1:DOUBLE}])\n";
+
+
+    // Use testQuery for EXPLAIN (not testIngestionQuery).
+    testQuery(
+        PLANNER_CONFIG_LEGACY_QUERY_EXPLAIN,
+        ImmutableMap.of("sqlQueryId", "dummy"),
+        Collections.emptyList(),
+        sql,
+        CalciteTests.SUPER_USER_AUTH_RESULT,
+        ImmutableList.of(),
+        new DefaultResultsVerifier(
+            ImmutableList.of(
+                new Object[]{
+                    legacyExplanation,
+                    resources,
+                    attributes
+                }
+            ),
+            null
+        ),
+        null
+    );
+
+    // Test correctness of the query when only the CLUSTERED BY clause is 
present
+    final String explanation =
+        "["
+        + "{\"query\":{\"queryType\":\"scan\","
+        + 
"\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},"
+        + 
"\"virtualColumns\":[{\"type\":\"expression\",\"name\":\"v0\",\"expression\":\"floor(\\\"m1\\\")\",\"outputType\":\"FLOAT\"},"
+        + 
"{\"type\":\"expression\",\"name\":\"v1\",\"expression\":\"ceil(\\\"m2\\\")\",\"outputType\":\"DOUBLE\"}],"
+        + "\"resultFormat\":\"compactedList\","
+        + 
"\"orderBy\":[{\"columnName\":\"v0\",\"order\":\"ascending\"},{\"columnName\":\"dim1\",\"order\":\"descending\"},"
+        + 
"{\"columnName\":\"v1\",\"order\":\"ascending\"}],\"columns\":[\"__time\",\"dim1\",\"v0\",\"v1\"],\"legacy\":false,"
+        + 
"\"context\":{\"sqlInsertSegmentGranularity\":\"\\\"DAY\\\"\",\"sqlQueryId\":\"dummy\",\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"},\"granularity\":{\"type\":\"all\"}},"
+        + 
"\"signature\":[{\"name\":\"__time\",\"type\":\"LONG\"},{\"name\":\"v0\",\"type\":\"FLOAT\"},{\"name\":\"dim1\",\"type\":\"STRING\"},"
+        + "{\"name\":\"v1\",\"type\":\"DOUBLE\"}],"
+        + 
"\"columnMappings\":[{\"queryColumn\":\"__time\",\"outputColumn\":\"__time\"},{\"queryColumn\":\"v0\",\"outputColumn\":\"floor_m1\"},"
+        + 
"{\"queryColumn\":\"dim1\",\"outputColumn\":\"dim1\"},{\"queryColumn\":\"v1\",\"outputColumn\":\"ceil_m2\"}]"
+        + "}]";
+
+    testQuery(
+        PLANNER_CONFIG_NATIVE_QUERY_EXPLAIN,
+        ImmutableMap.of("sqlQueryId", "dummy"),
+        Collections.emptyList(),
+        sql,
+        CalciteTests.SUPER_USER_AUTH_RESULT,
+        ImmutableList.of(),
+        new DefaultResultsVerifier(
+            ImmutableList.of(
+                new Object[]{
+                    explanation,
+                    resources,
+                    attributes
+                }
+            ),
+            null
+        ),
+        null
+    );
+
+    // Not using testIngestionQuery, so must set didTest manually to satisfy 
the check in tearDown.
+    didTest = true;
+  }
+
   @Test
   public void testInsertWithClusteredBy()
   {
@@ -893,8 +997,7 @@ public class CalciteInsertDmlTest extends 
CalciteIngestionDmlTest
         + "}]";
 
     final String resources = 
"[{\"name\":\"EXTERNAL\",\"type\":\"EXTERNAL\"},{\"name\":\"dst\",\"type\":\"DATASOURCE\"}]";
-    final String attributes = 
"{\"statementType\":\"INSERT\",\"targetDataSource\":\"dst\"}";
-
+    final String attributes = 
"{\"statementType\":\"INSERT\",\"targetDataSource\":\"dst\",\"partitionedBy\":{\"type\":\"all\"}}";
 
     // Use testQuery for EXPLAIN (not testIngestionQuery).
     testQuery(
@@ -998,7 +1101,7 @@ public class CalciteInsertDmlTest extends 
CalciteIngestionDmlTest
         + "}]";
 
     final String resources = 
"[{\"name\":\"dst\",\"type\":\"DATASOURCE\"},{\"name\":\"foo\",\"type\":\"DATASOURCE\"}]";
-    final String attributes = 
"{\"statementType\":\"INSERT\",\"targetDataSource\":\"druid.dst\"}";
+    final String attributes = 
"{\"statementType\":\"INSERT\",\"targetDataSource\":\"druid.dst\",\"partitionedBy\":\"DAY\",\"clusteredBy\":\"2,
 `dim1` DESC, CEIL(`m2`)\"}";
 
     // Use testQuery for EXPLAIN (not testIngestionQuery).
     testQuery(
diff --git 
a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteReplaceDmlTest.java 
b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteReplaceDmlTest.java
index 2192fe246c..fb922ffe77 100644
--- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteReplaceDmlTest.java
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteReplaceDmlTest.java
@@ -636,7 +636,7 @@ public class CalciteReplaceDmlTest extends 
CalciteIngestionDmlTest
                 + 
"\"columnMappings\":[{\"queryColumn\":\"x\",\"outputColumn\":\"x\"},{\"queryColumn\":\"y\",\"outputColumn\":\"y\"},{\"queryColumn\":\"z\",\"outputColumn\":\"z\"}]}]";
 
     final String resources = 
"[{\"name\":\"EXTERNAL\",\"type\":\"EXTERNAL\"},{\"name\":\"dst\",\"type\":\"DATASOURCE\"}]";
-    final String attributes = 
"{\"statementType\":\"REPLACE\",\"targetDataSource\":\"dst\"}";
+    final String attributes = 
"{\"statementType\":\"REPLACE\",\"targetDataSource\":\"dst\",\"partitionedBy\":{\"type\":\"all\"},\"replaceTimeChunks\":\"'ALL'\"}";
 
     // Use testQuery for EXPLAIN (not testIngestionQuery).
     testQuery(
@@ -683,6 +683,88 @@ public class CalciteReplaceDmlTest extends 
CalciteIngestionDmlTest
     didTest = true;
   }
 
+  @Test
+  public void testExplainReplaceTimeChunksWithPartitioningAndClustering() 
throws IOException
+  {
+    // Skip vectorization since otherwise the "context" will change for each 
subtest.
+    skipVectorize();
+
+    ObjectMapper queryJsonMapper = queryFramework().queryJsonMapper();
+    final ScanQuery expectedQuery = newScanQueryBuilder()
+        .dataSource("foo")
+        .intervals(querySegmentSpec(Filtration.eternity()))
+        .columns("__time", "cnt", "dim1", "dim2", "dim3", "m1", "m2", 
"unique_dim1")
+        .orderBy(
+            ImmutableList.of(
+                new ScanQuery.OrderBy("dim1", ScanQuery.Order.ASCENDING)
+            )
+        )
+        .context(
+            queryJsonMapper.readValue(
+                
"{\"sqlInsertSegmentGranularity\":\"\\\"DAY\\\"\",\"sqlQueryId\":\"dummy\",\"sqlReplaceTimeChunks\":\"2000-01-01T00:00:00.000Z/2000-01-02T00:00:00.000Z\",\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"}",
+                JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT
+            )
+        )
+        .build();
+
+    final String legacyExplanation =
+        "DruidQueryRel(query=["
+        + queryJsonMapper.writeValueAsString(expectedQuery)
+        + "], signature=[{__time:LONG, dim1:STRING, dim2:STRING, dim3:STRING, 
cnt:LONG, m1:FLOAT, m2:DOUBLE, unique_dim1:COMPLEX<hyperUnique>}])\n";
+
+    final String explanation = 
"[{\"query\":{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"resultFormat\":\"compactedList\",\"orderBy\":[{\"columnName\":\"dim1\",\"order\":\"ascending\"}],\"columns\":[\"__time\",\"cnt\",\"dim1\",\"dim2\",\"dim3\",\"m1\",\"m2\",\"unique_dim1\"],\"legacy\":false,\"context\":{\"sqlInsertSegmentGranularity\":
 [...]
+    final String resources = 
"[{\"name\":\"dst\",\"type\":\"DATASOURCE\"},{\"name\":\"foo\",\"type\":\"DATASOURCE\"}]";
+    final String attributes = 
"{\"statementType\":\"REPLACE\",\"targetDataSource\":\"dst\",\"partitionedBy\":\"DAY\",\"clusteredBy\":\"`dim1`\",\"replaceTimeChunks\":\"`__time`
 >= TIMESTAMP '2000-01-01 00:00:00' AND `__time` < TIMESTAMP '2000-01-02 
00:00:00'\"}";
+
+    final String sql = "EXPLAIN PLAN FOR"
+                       + " REPLACE INTO dst"
+                       + " OVERWRITE WHERE __time >= TIMESTAMP '2000-01-01 
00:00:00' AND __time < TIMESTAMP '2000-01-02 00:00:00' "
+                       + "SELECT * FROM foo PARTITIONED BY DAY CLUSTERED BY 
dim1";
+    // Use testQuery for EXPLAIN (not testIngestionQuery).
+    testQuery(
+        PLANNER_CONFIG_LEGACY_QUERY_EXPLAIN,
+        ImmutableMap.of("sqlQueryId", "dummy"),
+        Collections.emptyList(),
+        sql,
+        CalciteTests.SUPER_USER_AUTH_RESULT,
+        ImmutableList.of(),
+        new DefaultResultsVerifier(
+            ImmutableList.of(
+                new Object[]{
+                    legacyExplanation,
+                    resources,
+                    attributes
+                }
+            ),
+            null
+        ),
+        null
+    );
+
+    testQuery(
+        PLANNER_CONFIG_NATIVE_QUERY_EXPLAIN,
+        ImmutableMap.of("sqlQueryId", "dummy"),
+        Collections.emptyList(),
+        sql,
+        CalciteTests.SUPER_USER_AUTH_RESULT,
+        ImmutableList.of(),
+        new DefaultResultsVerifier(
+            ImmutableList.of(
+                new Object[]{
+                    explanation,
+                    resources,
+                    attributes
+                }
+            ),
+            null
+        ),
+        null
+    );
+
+    // Not using testIngestionQuery, so must set didTest manually to satisfy 
the check in tearDown.
+    didTest = true;
+  }
+
   @Test
   public void testExplainReplaceFromExternalUnauthorized()
   {
diff --git 
a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteSelectQueryTest.java 
b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteSelectQueryTest.java
index 80e9f01c40..54d8e856af 100644
--- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteSelectQueryTest.java
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteSelectQueryTest.java
@@ -544,7 +544,7 @@ public class CalciteSelectQueryTest extends 
BaseCalciteQueryTest
                                + "}]";
     final String legacyExplanation = 
"DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"inline\",\"columnNames\":[\"EXPR$0\"],\"columnTypes\":[\"LONG\"],\"rows\":[[2]]},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"resultFormat\":\"compactedList\",\"columns\":[\"EXPR$0\"],\"legacy\":false,\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimesta
 [...]
     final String resources = "[]";
-    final String attributes = 
"{\"statementType\":\"SELECT\",\"targetDataSource\":null}";
+    final String attributes = "{\"statementType\":\"SELECT\"}";
 
     testQuery(
         PLANNER_CONFIG_LEGACY_QUERY_EXPLAIN,
@@ -1290,7 +1290,7 @@ public class CalciteSelectQueryTest extends 
BaseCalciteQueryTest
                                + 
"\"signature\":[{\"name\":\"__time\",\"type\":\"LONG\"},{\"name\":\"dim1\",\"type\":\"STRING\"},{\"name\":\"dim2\",\"type\":\"STRING\"},{\"name\":\"dim3\",\"type\":\"STRING\"},{\"name\":\"cnt\",\"type\":\"LONG\"},{\"name\":\"m1\",\"type\":\"FLOAT\"},{\"name\":\"m2\",\"type\":\"DOUBLE\"},{\"name\":\"unique_dim1\",\"type\":\"COMPLEX<hyperUnique>\"}],"
                                + 
"\"columnMappings\":[{\"queryColumn\":\"__time\",\"outputColumn\":\"__time\"},{\"queryColumn\":\"dim1\",\"outputColumn\":\"dim1\"},{\"queryColumn\":\"dim2\",\"outputColumn\":\"dim2\"},{\"queryColumn\":\"dim3\",\"outputColumn\":\"dim3\"},{\"queryColumn\":\"cnt\",\"outputColumn\":\"cnt\"},{\"queryColumn\":\"m1\",\"outputColumn\":\"m1\"},{\"queryColumn\":\"m2\",\"outputColumn\":\"m2\"},{\"queryColumn\":\"unique_dim1\",\"outputColumn\":\"unique_dim1\"}]}]";
     final String resources = "[{\"name\":\"foo\",\"type\":\"DATASOURCE\"}]";
-    final String attributes = 
"{\"statementType\":\"SELECT\",\"targetDataSource\":null}";
+    final String attributes = "{\"statementType\":\"SELECT\"}";
 
     testQuery(
         PLANNER_CONFIG_LEGACY_QUERY_EXPLAIN,
diff --git 
a/sql/src/test/java/org/apache/druid/sql/calcite/IngestTableFunctionTest.java 
b/sql/src/test/java/org/apache/druid/sql/calcite/IngestTableFunctionTest.java
index 4b9d4382c6..a3ab06ab28 100644
--- 
a/sql/src/test/java/org/apache/druid/sql/calcite/IngestTableFunctionTest.java
+++ 
b/sql/src/test/java/org/apache/druid/sql/calcite/IngestTableFunctionTest.java
@@ -318,7 +318,7 @@ public class IngestTableFunctionTest extends 
CalciteIngestionDmlTest
         
"\"signature\":[{\"name\":\"x\",\"type\":\"STRING\"},{\"name\":\"y\",\"type\":\"STRING\"},{\"name\":\"z\",\"type\":\"LONG\"}],"
 +
         
"\"columnMappings\":[{\"queryColumn\":\"x\",\"outputColumn\":\"x\"},{\"queryColumn\":\"y\",\"outputColumn\":\"y\"},{\"queryColumn\":\"z\",\"outputColumn\":\"z\"}]}]";
     final String resources = 
"[{\"name\":\"EXTERNAL\",\"type\":\"EXTERNAL\"},{\"name\":\"dst\",\"type\":\"DATASOURCE\"}]";
-    final String attributes = 
"{\"statementType\":\"INSERT\",\"targetDataSource\":\"dst\"}";
+    final String attributes = 
"{\"statementType\":\"INSERT\",\"targetDataSource\":\"dst\",\"partitionedBy\":{\"type\":\"all\"}}";
 
     testQuery(
         PLANNER_CONFIG_NATIVE_QUERY_EXPLAIN,
diff --git 
a/sql/src/test/java/org/apache/druid/sql/calcite/planner/ExplainAttributesTest.java
 
b/sql/src/test/java/org/apache/druid/sql/calcite/planner/ExplainAttributesTest.java
new file mode 100644
index 0000000000..0b3466634c
--- /dev/null
+++ 
b/sql/src/test/java/org/apache/druid/sql/calcite/planner/ExplainAttributesTest.java
@@ -0,0 +1,130 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.sql.calcite.planner;
+
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.apache.calcite.sql.SqlNode;
+import org.apache.calcite.sql.SqlNodeList;
+import org.apache.druid.jackson.DefaultObjectMapper;
+import org.apache.druid.java.util.common.granularity.Granularities;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+import org.mockito.Mockito;
+
+public class ExplainAttributesTest
+{
+  private static final ObjectMapper DEFAULT_OBJECT_MAPPER = new 
DefaultObjectMapper();
+  private static final SqlNode DATA_SOURCE = Mockito.mock(SqlNode.class);
+  private static final SqlNodeList CLUSTERED_BY = 
Mockito.mock(SqlNodeList.class);
+  private static final SqlNode TIME_CHUNKS = Mockito.mock(SqlNode.class);
+
+  @Before
+  public void setup()
+  {
+    Mockito.when(DATA_SOURCE.toString()).thenReturn("foo");
+    Mockito.when(CLUSTERED_BY.toString()).thenReturn("`bar`, `jazz`");
+    Mockito.when(TIME_CHUNKS.toString()).thenReturn("ALL");
+  }
+
+  @Test
+  public void testSimpleGetters()
+  {
+    ExplainAttributes selectAttributes = new ExplainAttributes("SELECT", null, 
null, null, null);
+    Assert.assertEquals("SELECT", selectAttributes.getStatementType());
+    Assert.assertNull(selectAttributes.getTargetDataSource());
+    Assert.assertNull(selectAttributes.getPartitionedBy());
+    Assert.assertNull(selectAttributes.getClusteredBy());
+    Assert.assertNull(selectAttributes.getReplaceTimeChunks());
+  }
+
+  @Test
+  public void testSerializeSelectAttributes() throws JsonProcessingException
+  {
+    ExplainAttributes selectAttributes = new ExplainAttributes(
+        "SELECT",
+        null,
+        null,
+        null,
+        null
+    );
+    final String expectedAttributes = "{"
+                                      + "\"statementType\":\"SELECT\""
+                                      + "}";
+    Assert.assertEquals(expectedAttributes, 
DEFAULT_OBJECT_MAPPER.writeValueAsString(selectAttributes));
+  }
+
+  @Test
+  public void testSerializeInsertAttributes() throws JsonProcessingException
+  {
+    ExplainAttributes insertAttributes = new ExplainAttributes(
+        "INSERT",
+        DATA_SOURCE,
+        Granularities.DAY,
+        null,
+        null
+    );
+    final String expectedAttributes = "{"
+                                      + "\"statementType\":\"INSERT\","
+                                      + "\"targetDataSource\":\"foo\","
+                                      + "\"partitionedBy\":\"DAY\""
+                                      + "}";
+    Assert.assertEquals(expectedAttributes, 
DEFAULT_OBJECT_MAPPER.writeValueAsString(insertAttributes));
+  }
+
+  @Test
+  public void testSerializeInsertAllAttributes() throws JsonProcessingException
+  {
+    ExplainAttributes insertAttributes = new ExplainAttributes(
+        "INSERT",
+        DATA_SOURCE,
+        Granularities.ALL,
+        null,
+        null
+    );
+    final String expectedAttributes = "{"
+                                      + "\"statementType\":\"INSERT\","
+                                      + "\"targetDataSource\":\"foo\","
+                                      + "\"partitionedBy\":{\"type\":\"all\"}"
+                                      + "}";
+    Assert.assertEquals(expectedAttributes, 
DEFAULT_OBJECT_MAPPER.writeValueAsString(insertAttributes));
+  }
+
+  @Test
+  public void testSerializeReplaceAttributes() throws JsonProcessingException
+  {
+    ExplainAttributes replaceAttributes = new ExplainAttributes(
+        "REPLACE",
+        DATA_SOURCE,
+        Granularities.HOUR,
+        CLUSTERED_BY,
+        TIME_CHUNKS
+    );
+    final String expectedAttributes = "{"
+        + "\"statementType\":\"REPLACE\","
+        + "\"targetDataSource\":\"foo\","
+        + "\"partitionedBy\":\"HOUR\","
+        + "\"clusteredBy\":\"`bar`, `jazz`\","
+        + "\"replaceTimeChunks\":\"ALL\""
+        + "}";
+    Assert.assertEquals(expectedAttributes, 
DEFAULT_OBJECT_MAPPER.writeValueAsString(replaceAttributes));
+  }
+}
diff --git a/sql/src/test/java/org/apache/druid/sql/http/SqlResourceTest.java 
b/sql/src/test/java/org/apache/druid/sql/http/SqlResourceTest.java
index 2a80011a9b..f825423ba9 100644
--- a/sql/src/test/java/org/apache/druid/sql/http/SqlResourceTest.java
+++ b/sql/src/test/java/org/apache/druid/sql/http/SqlResourceTest.java
@@ -1328,7 +1328,7 @@ public class SqlResourceTest extends CalciteTestBase
                 "RESOURCES",
                 "[{\"name\":\"foo\",\"type\":\"DATASOURCE\"}]",
                 "ATTRIBUTES",
-                "{\"statementType\":\"SELECT\",\"targetDataSource\":null}"
+                "{\"statementType\":\"SELECT\"}"
             )
         ),
         rows


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]


Reply via email to