This is an automated email from the ASF dual-hosted git repository.
gian pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/druid.git
The following commit(s) were added to refs/heads/master by this push:
new edfbcc8455 Preserve column order in DruidSchema, SegmentMetadataQuery. (#12754)
edfbcc8455 is described below
commit edfbcc8455bcc71fd5584e4beceece5cd50b8828
Author: Gian Merlino <[email protected]>
AuthorDate: Fri Jul 8 22:04:11 2022 -0700
Preserve column order in DruidSchema, SegmentMetadataQuery. (#12754)
* Preserve column order in DruidSchema, SegmentMetadataQuery.
Instead of putting columns in alphabetical order, preserve the order in
which they appear. This is helpful because it makes query order better
match ingestion order. It also allows tools, like the reindexing flow in
the web console, to more easily do follow-on ingestions using a column
order that matches the pre-existing column order.
We prefer the order from the latest segments. The logic takes all columns
from the latest segments in the order they appear, then adds on columns
from older segments after those (see the sketch below).
* Additional test adjustments.
* Adjust imports.
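For illustration only, here is a minimal sketch of the ordering rule
described above. It is not the actual mergeAnalyses implementation; the
class, method, and parameter names are hypothetical, and it assumes the
caller has already determined which segment is newer:

    import java.util.LinkedHashMap;
    import java.util.Map;

    public class ColumnOrderMergeSketch
    {
      // Newer-segment columns keep their original positions; columns that
      // appear only in older segments are appended after them.
      static <V> LinkedHashMap<String, V> mergeColumnOrder(
          Map<String, V> newerColumns,
          Map<String, V> olderColumns
      )
      {
        // Copying preserves the newer map's iteration order.
        final LinkedHashMap<String, V> merged = new LinkedHashMap<>(newerColumns);

        for (Map.Entry<String, V> entry : olderColumns.entrySet()) {
          // putIfAbsent keeps both the newer value and its insertion position.
          merged.putIfAbsent(entry.getKey(), entry.getValue());
        }

        return merged;
      }
    }

This only works because the patch swaps TreeMap (alphabetical iteration)
for LinkedHashMap (insertion-order iteration) throughout.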
---
.../DruidSchemaInternRowSignatureBenchmark.java | 5 +-
.../druid/query/metadata/SegmentAnalyzer.java | 5 +-
.../SegmentMetadataQueryQueryToolChest.java | 34 +-
.../SegmentMetadataQueryRunnerFactory.java | 4 +-
.../query/metadata/metadata/SegmentAnalysis.java | 12 +-
.../java/org/apache/druid/segment/IndexIO.java | 5 +-
.../segment/QueryableIndexStorageAdapter.java | 14 +-
.../org/apache/druid/query/DoubleStorageTest.java | 152 +++----
.../druid/query/metadata/SegmentAnalysisTest.java | 89 ++++
.../druid/query/metadata/SegmentAnalyzerTest.java | 42 +-
.../SegmentMetadataQueryQueryToolChestTest.java | 58 +--
.../query/metadata/SegmentMetadataQueryTest.java | 468 +++++++++++----------
.../metadata/SegmentMetadataUnionQueryTest.java | 29 +-
.../druid/sql/calcite/schema/DruidSchema.java | 5 +-
.../druid/sql/avatica/DruidAvaticaHandlerTest.java | 40 +-
.../druid/sql/avatica/DruidStatementTest.java | 6 +-
.../druid/sql/calcite/CalciteExplainQueryTest.java | 12 +-
.../druid/sql/calcite/CalciteIngestionDmlTest.java | 2 +-
.../apache/druid/sql/calcite/CalciteQueryTest.java | 4 +-
.../druid/sql/calcite/CalciteSelectQueryTest.java | 50 +--
.../druid/sql/calcite/schema/DruidSchemaTest.java | 121 +++---
.../org/apache/druid/sql/http/SqlResourceTest.java | 30 +-
22 files changed, 689 insertions(+), 498 deletions(-)
diff --git a/benchmarks/src/test/java/org/apache/druid/benchmark/DruidSchemaInternRowSignatureBenchmark.java b/benchmarks/src/test/java/org/apache/druid/benchmark/DruidSchemaInternRowSignatureBenchmark.java
index e05d1549a4..c9b46d678e 100644
--- a/benchmarks/src/test/java/org/apache/druid/benchmark/DruidSchemaInternRowSignatureBenchmark.java
+++ b/benchmarks/src/test/java/org/apache/druid/benchmark/DruidSchemaInternRowSignatureBenchmark.java
@@ -57,8 +57,7 @@ import org.openjdk.jmh.annotations.Warmup;
import org.openjdk.jmh.infra.Blackhole;
import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
+import java.util.LinkedHashMap;
import java.util.Set;
import java.util.concurrent.TimeUnit;
@@ -113,7 +112,7 @@ public class DruidSchemaInternRowSignatureBenchmark
protected Sequence<SegmentAnalysis> runSegmentMetadataQuery(Iterable<SegmentId> segments)
{
final int numColumns = 1000;
- Map<String, ColumnAnalysis> columnToAnalysisMap = new HashMap<>();
+ LinkedHashMap<String, ColumnAnalysis> columnToAnalysisMap = new LinkedHashMap<>();
for (int i = 0; i < numColumns; ++i) {
columnToAnalysisMap.put(
"col" + i,
diff --git
a/processing/src/main/java/org/apache/druid/query/metadata/SegmentAnalyzer.java
b/processing/src/main/java/org/apache/druid/query/metadata/SegmentAnalyzer.java
index ca26ce6950..88cc5dcb8f 100644
---
a/processing/src/main/java/org/apache/druid/query/metadata/SegmentAnalyzer.java
+++
b/processing/src/main/java/org/apache/druid/query/metadata/SegmentAnalyzer.java
@@ -58,8 +58,8 @@ import org.joda.time.Interval;
import javax.annotation.Nullable;
import java.io.IOException;
import java.util.EnumSet;
+import java.util.LinkedHashMap;
import java.util.Map;
-import java.util.TreeMap;
public class SegmentAnalyzer
{
@@ -98,7 +98,8 @@ public class SegmentAnalyzer
// get length and column names from storageAdapter
final int length = storageAdapter.getNumRows();
- Map<String, ColumnAnalysis> columns = new TreeMap<>();
+ // Use LinkedHashMap to preserve column order.
+ final Map<String, ColumnAnalysis> columns = new LinkedHashMap<>();
final RowSignature rowSignature = storageAdapter.getRowSignature();
for (String columnName : rowSignature.getColumnNames()) {
diff --git a/processing/src/main/java/org/apache/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java b/processing/src/main/java/org/apache/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java
index 1bb24ef2e7..45cc18ff5a 100644
--- a/processing/src/main/java/org/apache/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java
+++ b/processing/src/main/java/org/apache/druid/query/metadata/SegmentMetadataQueryQueryToolChest.java
@@ -54,17 +54,19 @@ import org.apache.druid.query.metadata.metadata.ColumnAnalysis;
import org.apache.druid.query.metadata.metadata.SegmentAnalysis;
import org.apache.druid.query.metadata.metadata.SegmentMetadataQuery;
import org.apache.druid.timeline.LogicalSegment;
+import org.apache.druid.timeline.SegmentId;
import org.joda.time.DateTime;
import org.joda.time.Interval;
+import javax.annotation.Nullable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashMap;
+import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
-import java.util.TreeMap;
import java.util.function.BinaryOperator;
public class SegmentMetadataQueryQueryToolChest extends QueryToolChest<SegmentAnalysis, SegmentMetadataQuery>
@@ -108,7 +110,8 @@ public class SegmentMetadataQueryQueryToolChest extends QueryToolChest<SegmentAn
ResponseContext context
)
{
- SegmentMetadataQuery updatedQuery = ((SegmentMetadataQuery) queryPlus.getQuery()).withFinalizedAnalysisTypes(config);
+ SegmentMetadataQuery updatedQuery = ((SegmentMetadataQuery) queryPlus.getQuery()).withFinalizedAnalysisTypes(
+ config);
QueryPlus<SegmentAnalysis> updatedQueryPlus = queryPlus.withQuery(updatedQuery);
return new MappedSequence<>(
CombiningSequence.create(
@@ -135,7 +138,12 @@ public class SegmentMetadataQueryQueryToolChest extends QueryToolChest<SegmentAn
@Override
public BinaryOperator<SegmentAnalysis> createMergeFn(Query<SegmentAnalysis> query)
{
- return (arg1, arg2) -> mergeAnalyses(arg1, arg2, ((SegmentMetadataQuery) query).isLenientAggregatorMerge());
+ return (arg1, arg2) -> mergeAnalyses(
+ Iterables.getFirst(query.getDataSource().getTableNames(), null),
+ arg1,
+ arg2,
+ ((SegmentMetadataQuery) query).isLenientAggregatorMerge()
+ );
}
@Override
@@ -246,8 +254,9 @@ public class SegmentMetadataQueryQueryToolChest extends QueryToolChest<SegmentAn
@VisibleForTesting
public static SegmentAnalysis mergeAnalyses(
- final SegmentAnalysis arg1,
- final SegmentAnalysis arg2,
+ @Nullable String dataSource,
+ SegmentAnalysis arg1,
+ SegmentAnalysis arg2,
boolean lenientAggregatorMerge
)
{
@@ -259,6 +268,19 @@ public class SegmentMetadataQueryQueryToolChest extends QueryToolChest<SegmentAn
return arg1;
}
+ // Swap arg1, arg2 so the later-ending interval is first. This ensures we prefer the latest column order.
+ // We're preserving it so callers can see columns in their natural order.
+ if (dataSource != null) {
+ final SegmentId id1 = SegmentId.tryParse(dataSource, arg1.getId());
+ final SegmentId id2 = SegmentId.tryParse(dataSource, arg2.getId());
+
+ if (id1 != null && id2 != null && id2.getIntervalEnd().isAfter(id1.getIntervalEnd())) {
+ final SegmentAnalysis tmp = arg1;
+ arg1 = arg2;
+ arg2 = tmp;
+ }
+ }
+
List<Interval> newIntervals = null;
if (arg1.getIntervals() != null) {
newIntervals = new ArrayList<>(arg1.getIntervals());
@@ -272,7 +294,7 @@ public class SegmentMetadataQueryQueryToolChest extends QueryToolChest<SegmentAn
final Map<String, ColumnAnalysis> leftColumns = arg1.getColumns();
final Map<String, ColumnAnalysis> rightColumns = arg2.getColumns();
- Map<String, ColumnAnalysis> columns = new TreeMap<>();
+ final LinkedHashMap<String, ColumnAnalysis> columns = new LinkedHashMap<>();
Set<String> rightColumnNames = Sets.newHashSet(rightColumns.keySet());
for (Map.Entry<String, ColumnAnalysis> entry : leftColumns.entrySet()) {
diff --git
a/processing/src/main/java/org/apache/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java
b/processing/src/main/java/org/apache/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java
index c07ab5d6b7..bc7dc9339b 100644
---
a/processing/src/main/java/org/apache/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java
+++
b/processing/src/main/java/org/apache/druid/query/metadata/SegmentMetadataQueryRunnerFactory.java
@@ -52,9 +52,9 @@ import org.joda.time.Interval;
import java.util.Collections;
import java.util.HashMap;
+import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
-import java.util.TreeMap;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
@@ -98,7 +98,7 @@ public class SegmentMetadataQueryRunnerFactory implements QueryRunnerFactory<Seg
totalSize = analyzedColumns.size() * numRows;
}
- Map<String, ColumnAnalysis> columns = new TreeMap<>();
+ LinkedHashMap<String, ColumnAnalysis> columns = new LinkedHashMap<>();
ColumnIncluderator includerator = updatedQuery.getToInclude();
for (Map.Entry<String, ColumnAnalysis> entry : analyzedColumns.entrySet()) {
final String columnName = entry.getKey();
diff --git
a/processing/src/main/java/org/apache/druid/query/metadata/metadata/SegmentAnalysis.java
b/processing/src/main/java/org/apache/druid/query/metadata/metadata/SegmentAnalysis.java
index 71421366b4..13576a6a11 100644
---
a/processing/src/main/java/org/apache/druid/query/metadata/metadata/SegmentAnalysis.java
+++
b/processing/src/main/java/org/apache/druid/query/metadata/metadata/SegmentAnalysis.java
@@ -26,6 +26,7 @@ import org.apache.druid.java.util.common.granularity.Granularity;
import org.apache.druid.query.aggregation.AggregatorFactory;
import org.joda.time.Interval;
+import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
@@ -42,7 +43,12 @@ public class SegmentAnalysis implements Comparable<SegmentAnalysis>
*/
private final String id;
private final List<Interval> interval;
- private final Map<String, ColumnAnalysis> columns;
+
+ /**
+ * Require LinkedHashMap to emphasize how important column order is. It's used by DruidSchema to keep
+ * SQL column order in line with ingestion column order.
+ */
+ private final LinkedHashMap<String, ColumnAnalysis> columns;
private final long size;
private final long numRows;
private final Map<String, AggregatorFactory> aggregators;
@@ -54,7 +60,7 @@ public class SegmentAnalysis implements Comparable<SegmentAnalysis>
public SegmentAnalysis(
@JsonProperty("id") String id,
@JsonProperty("intervals") List<Interval> interval,
- @JsonProperty("columns") Map<String, ColumnAnalysis> columns,
+ @JsonProperty("columns") LinkedHashMap<String, ColumnAnalysis> columns,
@JsonProperty("size") long size,
@JsonProperty("numRows") long numRows,
@JsonProperty("aggregators") Map<String, AggregatorFactory> aggregators,
@@ -87,7 +93,7 @@ public class SegmentAnalysis implements Comparable<SegmentAnalysis>
}
@JsonProperty
- public Map<String, ColumnAnalysis> getColumns()
+ public LinkedHashMap<String, ColumnAnalysis> getColumns()
{
return columns;
}
diff --git a/processing/src/main/java/org/apache/druid/segment/IndexIO.java b/processing/src/main/java/org/apache/druid/segment/IndexIO.java
index f593f104eb..9698ebdc2b 100644
--- a/processing/src/main/java/org/apache/druid/segment/IndexIO.java
+++ b/processing/src/main/java/org/apache/druid/segment/IndexIO.java
@@ -79,6 +79,7 @@ import java.nio.ByteOrder;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
+import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
@@ -442,7 +443,7 @@ public class IndexIO
{
MMappedIndex index = legacyHandler.mapDir(inDir);
- Map<String, Supplier<ColumnHolder>> columns = new HashMap<>();
+ Map<String, Supplier<ColumnHolder>> columns = new LinkedHashMap<>();
for (String dimension : index.getAvailableDimensions()) {
ColumnBuilder builder = new ColumnBuilder()
@@ -624,7 +625,7 @@ public class IndexIO
}
}
- Map<String, Supplier<ColumnHolder>> columns = new HashMap<>();
+ Map<String, Supplier<ColumnHolder>> columns = new LinkedHashMap<>();
// Register the time column
ByteBuffer timeBuffer = smooshedFiles.mapFile("__time");
diff --git
a/processing/src/main/java/org/apache/druid/segment/QueryableIndexStorageAdapter.java
b/processing/src/main/java/org/apache/druid/segment/QueryableIndexStorageAdapter.java
index e773ff78da..c0c117b8c8 100644
---
a/processing/src/main/java/org/apache/druid/segment/QueryableIndexStorageAdapter.java
+++
b/processing/src/main/java/org/apache/druid/segment/QueryableIndexStorageAdapter.java
@@ -20,7 +20,6 @@
package org.apache.druid.segment;
import com.google.common.annotations.VisibleForTesting;
-import com.google.common.collect.Sets;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.granularity.Granularities;
@@ -44,8 +43,9 @@ import org.joda.time.Interval;
import javax.annotation.Nullable;
import java.io.IOException;
import java.io.UncheckedIOException;
-import java.util.HashSet;
+import java.util.LinkedHashSet;
import java.util.Objects;
+import java.util.Set;
/**
*
@@ -82,8 +82,14 @@ public class QueryableIndexStorageAdapter implements StorageAdapter
@Override
public Iterable<String> getAvailableMetrics()
{
- HashSet<String> columnNames = Sets.newHashSet(index.getColumnNames());
- return Sets.difference(columnNames, Sets.newHashSet(index.getAvailableDimensions()));
+ // Use LinkedHashSet to preserve the original order.
+ final Set<String> columnNames = new LinkedHashSet<>(index.getColumnNames());
+
+ for (final String dimension : index.getAvailableDimensions()) {
+ columnNames.remove(dimension);
+ }
+
+ return columnNames;
}
@Override
diff --git a/processing/src/test/java/org/apache/druid/query/DoubleStorageTest.java b/processing/src/test/java/org/apache/druid/query/DoubleStorageTest.java
index d0719e7564..9dc8ee6847 100644
--- a/processing/src/test/java/org/apache/druid/query/DoubleStorageTest.java
+++ b/processing/src/test/java/org/apache/druid/query/DoubleStorageTest.java
@@ -73,6 +73,7 @@ import java.io.IOException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
+import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
@@ -153,42 +154,44 @@ public class DoubleStorageTest
SegmentAnalysis expectedSegmentAnalysisDouble = new SegmentAnalysis(
SEGMENT_ID.toString(),
ImmutableList.of(INTERVAL),
- ImmutableMap.of(
- TIME_COLUMN,
- new ColumnAnalysis(
- ColumnType.LONG,
- ValueType.LONG.name(),
- false,
- false,
- 100,
- null,
- null,
- null,
- null
- ),
- DIM_NAME,
- new ColumnAnalysis(
- ColumnType.STRING,
- ValueType.STRING.name(),
- false,
- false,
- 120,
- 1,
- DIM_VALUE,
- DIM_VALUE,
- null
- ),
- DIM_FLOAT_NAME,
- new ColumnAnalysis(
- ColumnType.DOUBLE,
- ValueType.DOUBLE.name(),
- false,
- false,
- 80,
- null,
- null,
- null,
- null
+ new LinkedHashMap<>(
+ ImmutableMap.of(
+ TIME_COLUMN,
+ new ColumnAnalysis(
+ ColumnType.LONG,
+ ValueType.LONG.name(),
+ false,
+ false,
+ 100,
+ null,
+ null,
+ null,
+ null
+ ),
+ DIM_NAME,
+ new ColumnAnalysis(
+ ColumnType.STRING,
+ ValueType.STRING.name(),
+ false,
+ false,
+ 120,
+ 1,
+ DIM_VALUE,
+ DIM_VALUE,
+ null
+ ),
+ DIM_FLOAT_NAME,
+ new ColumnAnalysis(
+ ColumnType.DOUBLE,
+ ValueType.DOUBLE.name(),
+ false,
+ false,
+ 80,
+ null,
+ null,
+ null,
+ null
+ )
)
), 330,
MAX_ROWS,
@@ -201,44 +204,47 @@ public class DoubleStorageTest
SegmentAnalysis expectedSegmentAnalysisFloat = new SegmentAnalysis(
SEGMENT_ID.toString(),
ImmutableList.of(INTERVAL),
- ImmutableMap.of(
- TIME_COLUMN,
- new ColumnAnalysis(
- ColumnType.LONG,
- ValueType.LONG.name(),
- false,
- false,
- 100,
- null,
- null,
- null,
- null
- ),
- DIM_NAME,
- new ColumnAnalysis(
- ColumnType.STRING,
- ValueType.STRING.name(),
- false,
- false,
- 120,
- 1,
- DIM_VALUE,
- DIM_VALUE,
- null
- ),
- DIM_FLOAT_NAME,
- new ColumnAnalysis(
- ColumnType.FLOAT,
- ValueType.FLOAT.name(),
- false,
- false,
- 80,
- null,
- null,
- null,
- null
+ new LinkedHashMap<>(
+ ImmutableMap.of(
+ TIME_COLUMN,
+ new ColumnAnalysis(
+ ColumnType.LONG,
+ ValueType.LONG.name(),
+ false,
+ false,
+ 100,
+ null,
+ null,
+ null,
+ null
+ ),
+ DIM_NAME,
+ new ColumnAnalysis(
+ ColumnType.STRING,
+ ValueType.STRING.name(),
+ false,
+ false,
+ 120,
+ 1,
+ DIM_VALUE,
+ DIM_VALUE,
+ null
+ ),
+ DIM_FLOAT_NAME,
+ new ColumnAnalysis(
+ ColumnType.FLOAT,
+ ValueType.FLOAT.name(),
+ false,
+ false,
+ 80,
+ null,
+ null,
+ null,
+ null
+ )
)
- ), 330,
+ ),
+ 330,
MAX_ROWS,
null,
null,
diff --git a/processing/src/test/java/org/apache/druid/query/metadata/SegmentAnalysisTest.java b/processing/src/test/java/org/apache/druid/query/metadata/SegmentAnalysisTest.java
new file mode 100644
index 0000000000..4f68c9e059
--- /dev/null
+++ b/processing/src/test/java/org/apache/druid/query/metadata/SegmentAnalysisTest.java
@@ -0,0 +1,89 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.query.metadata;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import org.apache.druid.data.input.impl.TimestampSpec;
+import org.apache.druid.java.util.common.Intervals;
+import org.apache.druid.java.util.common.granularity.Granularities;
+import org.apache.druid.query.aggregation.CountAggregatorFactory;
+import org.apache.druid.query.metadata.metadata.ColumnAnalysis;
+import org.apache.druid.query.metadata.metadata.SegmentAnalysis;
+import org.apache.druid.segment.TestHelper;
+import org.apache.druid.segment.column.ColumnType;
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.util.LinkedHashMap;
+
+public class SegmentAnalysisTest
+{
+ @Test
+ public void testSerde() throws Exception
+ {
+ // Use LinkedHashMap to preserve order.
+ // We'll verify that the order is actually preserved on serde.
+ final LinkedHashMap<String, ColumnAnalysis> columns = new LinkedHashMap<>();
+ columns.put(
+ "b",
+ new ColumnAnalysis(ColumnType.LONG, ColumnType.LONG.asTypeString(), true, true, 0, null, null, null, null)
+ );
+ columns.put(
+ "a",
+ new ColumnAnalysis(ColumnType.FLOAT, ColumnType.FLOAT.asTypeString(), true, true, 0, null, null, null, null)
+ );
+ columns.put(
+ "f",
+ new ColumnAnalysis(ColumnType.STRING, ColumnType.STRING.asTypeString(), true, true, 0, null, null, null, null)
+ );
+ columns.put(
+ "c",
+ new ColumnAnalysis(ColumnType.DOUBLE, ColumnType.DOUBLE.asTypeString(), true, true, 0, null, null, null, null)
+ );
+
+ final SegmentAnalysis analysis = new SegmentAnalysis(
+ "id",
+ Intervals.ONLY_ETERNITY,
+ columns,
+ 1,
+ 2,
+ ImmutableMap.of("cnt", new CountAggregatorFactory("cnt")),
+ new TimestampSpec(null, null, null),
+ Granularities.SECOND,
+ true
+ );
+
+ final ObjectMapper jsonMapper = TestHelper.makeJsonMapper();
+ final SegmentAnalysis analysis2 = jsonMapper.readValue(
+ jsonMapper.writeValueAsBytes(analysis),
+ SegmentAnalysis.class
+ );
+
+ Assert.assertEquals(analysis, analysis2);
+
+ // Verify column order is preserved.
+ Assert.assertEquals(
+ ImmutableList.copyOf(columns.entrySet()),
+ ImmutableList.copyOf(analysis2.getColumns().entrySet())
+ );
+ }
+}
diff --git
a/processing/src/test/java/org/apache/druid/query/metadata/SegmentAnalyzerTest.java
b/processing/src/test/java/org/apache/druid/query/metadata/SegmentAnalyzerTest.java
index 82482541fb..b8c35917c3 100644
---
a/processing/src/test/java/org/apache/druid/query/metadata/SegmentAnalyzerTest.java
+++
b/processing/src/test/java/org/apache/druid/query/metadata/SegmentAnalyzerTest.java
@@ -46,6 +46,7 @@ import org.apache.druid.segment.QueryableIndexSegment;
import org.apache.druid.segment.Segment;
import org.apache.druid.segment.TestIndex;
import org.apache.druid.segment.column.ColumnBuilder;
+import org.apache.druid.segment.column.ColumnHolder;
import org.apache.druid.segment.column.ColumnType;
import org.apache.druid.segment.column.ValueType;
import org.apache.druid.segment.data.ObjectStrategy;
@@ -68,6 +69,7 @@ import java.io.IOException;
import java.net.URL;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.EnumSet;
@@ -108,9 +110,20 @@ public class SegmentAnalyzerTest extends InitializedNullHandlingTest
columns.size()
); // All columns including time and empty/null column
- for (DimensionSchema schema : TestIndex.DIMENSION_SCHEMAS) {
+ // Verify key order is the same as the underlying segment.
+ // This helps DruidSchema keep things in the proper order when it does SegmentMetadata queries.
+ final List<Map.Entry<String, ColumnAnalysis>> entriesInOrder = new ArrayList<>(columns.entrySet());
+
+ Assert.assertEquals(ColumnHolder.TIME_COLUMN_NAME, entriesInOrder.get(0).getKey());
+ Assert.assertEquals(ColumnType.LONG, entriesInOrder.get(0).getValue().getTypeSignature());
+
+ // Start from 1: skipping __time
+ for (int i = 0; i < TestIndex.DIMENSION_SCHEMAS.size(); i++) {
+ final DimensionSchema schema = TestIndex.DIMENSION_SCHEMAS.get(i);
+ final Map.Entry<String, ColumnAnalysis> analysisEntry = entriesInOrder.get(i + 1 /* skip __time */);
final String dimension = schema.getName();
- final ColumnAnalysis columnAnalysis = columns.get(dimension);
+ Assert.assertEquals(dimension, analysisEntry.getKey());
+ final ColumnAnalysis columnAnalysis = analysisEntry.getValue();
final boolean isString = schema.getColumnType().is(ValueType.STRING);
Assert.assertEquals(dimension, schema.getColumnType().toString(), columnAnalysis.getType());
@@ -161,14 +174,20 @@ public class SegmentAnalyzerTest extends InitializedNullHandlingTest
Assert.assertEquals(SegmentId.dummy("test_1").toString(), analysis.getId());
final Map<String, ColumnAnalysis> columns = analysis.getColumns();
- Assert.assertEquals(
- TestIndex.COLUMNS.length + 3,
- columns.size()
- ); // All columns including time
+ // Verify key order is the same as the underlying segment.
+ // This helps DruidSchema keep things in the proper order when it does SegmentMetadata queries.
+ final List<Map.Entry<String, ColumnAnalysis>> entriesInOrder = new ArrayList<>(columns.entrySet());
- for (DimensionSchema schema : TestIndex.DIMENSION_SCHEMAS) {
+ Assert.assertEquals(ColumnHolder.TIME_COLUMN_NAME, entriesInOrder.get(0).getKey());
+ Assert.assertEquals(ColumnType.LONG, entriesInOrder.get(0).getValue().getTypeSignature());
+
+ // Start from 1: skipping __time
+ for (int i = 0; i < TestIndex.DIMENSION_SCHEMAS.size(); i++) {
+ final DimensionSchema schema = TestIndex.DIMENSION_SCHEMAS.get(i);
+ final Map.Entry<String, ColumnAnalysis> analysisEntry = entriesInOrder.get(i + 1 /* skip __time */);
final String dimension = schema.getName();
- final ColumnAnalysis columnAnalysis = columns.get(dimension);
+ Assert.assertEquals(dimension, analysisEntry.getKey());
+ final ColumnAnalysis columnAnalysis = analysisEntry.getValue();
final boolean isString = schema.getColumnType().is(ValueType.STRING);
Assert.assertEquals(dimension, schema.getColumnType().toString(), columnAnalysis.getType());
Assert.assertEquals(dimension, 0, columnAnalysis.getSize());
@@ -204,6 +223,7 @@ public class SegmentAnalyzerTest extends InitializedNullHandlingTest
* *Awesome* method name auto-generated by IntelliJ! I love IntelliJ!
*
* @param index
+ *
* @return
*/
private List<SegmentAnalysis> getSegmentAnalysises(Segment index, EnumSet<SegmentMetadataQuery.AnalysisType> analyses)
@@ -257,6 +277,7 @@ public class SegmentAnalyzerTest extends InitializedNullHandlingTest
* (which can happen if an aggregator was removed for a later version), then,
* analyzing the segment doesn't fail and the result of analysis of the complex column
* is reported as an error.
+ *
* @throws IOException
*/
@Test
@@ -317,7 +338,10 @@ public class SegmentAnalyzerTest extends InitializedNullHandlingTest
Assert.assertEquals("error:unknown_complex_invalid_complex_column_type", invalidColumnAnalysis.getErrorMessage());
// Run a segment metadata query also to verify it doesn't break
- final List<SegmentAnalysis> results = getSegmentAnalysises(segment, EnumSet.of(SegmentMetadataQuery.AnalysisType.SIZE));
+ final List<SegmentAnalysis> results = getSegmentAnalysises(
+ segment,
+ EnumSet.of(SegmentMetadataQuery.AnalysisType.SIZE)
+ );
for (SegmentAnalysis result : results) {
Assert.assertTrue(result.getColumns().get(invalid_aggregator).isError());
}
diff --git
a/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataQueryQueryToolChestTest.java
b/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataQueryQueryToolChestTest.java
index b93c160b29..f6a8e8b497 100644
---
a/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataQueryQueryToolChestTest.java
+++
b/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataQueryQueryToolChestTest.java
@@ -46,6 +46,7 @@ import org.junit.Assert;
import org.junit.Test;
import java.util.HashMap;
+import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
@@ -77,20 +78,23 @@ public class SegmentMetadataQueryQueryToolChestTest
SegmentAnalysis result = new SegmentAnalysis(
"testSegment",
ImmutableList.of(Intervals.of("2011-01-12T00:00:00.000Z/2011-04-15T00:00:00.001Z")),
- ImmutableMap.of(
- "placement",
- new ColumnAnalysis(
- ColumnType.STRING,
- ValueType.STRING.name(),
- true,
- false,
- 10881,
- 1,
- "preferred",
- "preferred",
- null
+ new LinkedHashMap<>(
+ ImmutableMap.of(
+ "placement",
+ new ColumnAnalysis(
+ ColumnType.STRING,
+ ValueType.STRING.name(),
+ true,
+ false,
+ 10881,
+ 1,
+ "preferred",
+ "preferred",
+ null
+ )
)
- ), 71982,
+ ),
+ 71982,
100,
null,
null,
@@ -117,7 +121,7 @@ public class SegmentMetadataQueryQueryToolChestTest
final SegmentAnalysis analysis1 = new SegmentAnalysis(
"id",
null,
- new HashMap<>(),
+ new LinkedHashMap<>(),
0,
0,
ImmutableMap.of(
@@ -131,7 +135,7 @@ public class SegmentMetadataQueryQueryToolChestTest
final SegmentAnalysis analysis2 = new SegmentAnalysis(
"id",
null,
- new HashMap<>(),
+ new LinkedHashMap<>(),
0,
0,
ImmutableMap.of(
@@ -167,7 +171,7 @@ public class SegmentMetadataQueryQueryToolChestTest
final SegmentAnalysis analysis1 = new SegmentAnalysis(
"id",
null,
- new HashMap<>(),
+ new LinkedHashMap<>(),
0,
0,
null,
@@ -178,7 +182,7 @@ public class SegmentMetadataQueryQueryToolChestTest
final SegmentAnalysis analysis2 = new SegmentAnalysis(
"id",
null,
- new HashMap<>(),
+ new LinkedHashMap<>(),
0,
0,
ImmutableMap.of(
@@ -206,7 +210,7 @@ public class SegmentMetadataQueryQueryToolChestTest
final SegmentAnalysis analysis1 = new SegmentAnalysis(
"id",
null,
- new HashMap<>(),
+ new LinkedHashMap<>(),
0,
0,
null,
@@ -217,7 +221,7 @@ public class SegmentMetadataQueryQueryToolChestTest
final SegmentAnalysis analysis2 = new SegmentAnalysis(
"id",
null,
- new HashMap<>(),
+ new LinkedHashMap<>(),
0,
0,
null,
@@ -236,7 +240,7 @@ public class SegmentMetadataQueryQueryToolChestTest
final SegmentAnalysis analysis1 = new SegmentAnalysis(
"id",
null,
- new HashMap<>(),
+ new LinkedHashMap<>(),
0,
0,
ImmutableMap.of(
@@ -250,7 +254,7 @@ public class SegmentMetadataQueryQueryToolChestTest
final SegmentAnalysis analysis2 = new SegmentAnalysis(
"id",
null,
- new HashMap<>(),
+ new LinkedHashMap<>(),
0,
0,
ImmutableMap.of(
@@ -331,7 +335,7 @@ public class SegmentMetadataQueryQueryToolChestTest
final SegmentAnalysis analysis1 = new SegmentAnalysis(
"id",
null,
- new HashMap<>(),
+ new LinkedHashMap<>(),
0,
0,
null,
@@ -342,7 +346,7 @@ public class SegmentMetadataQueryQueryToolChestTest
final SegmentAnalysis analysis2 = new SegmentAnalysis(
"id",
null,
- new HashMap<>(),
+ new LinkedHashMap<>(),
0,
0,
null,
@@ -353,7 +357,7 @@ public class SegmentMetadataQueryQueryToolChestTest
final SegmentAnalysis analysis3 = new SegmentAnalysis(
"id",
null,
- new HashMap<>(),
+ new LinkedHashMap<>(),
0,
0,
null,
@@ -364,7 +368,7 @@ public class SegmentMetadataQueryQueryToolChestTest
final SegmentAnalysis analysis4 = new SegmentAnalysis(
"id",
null,
- new HashMap<>(),
+ new LinkedHashMap<>(),
0,
0,
null,
@@ -375,7 +379,7 @@ public class SegmentMetadataQueryQueryToolChestTest
final SegmentAnalysis analysis5 = new SegmentAnalysis(
"id",
null,
- new HashMap<>(),
+ new LinkedHashMap<>(),
0,
0,
null,
@@ -395,6 +399,7 @@ public class SegmentMetadataQueryQueryToolChestTest
{
return SegmentMetadataQueryQueryToolChest.finalizeAnalysis(
SegmentMetadataQueryQueryToolChest.mergeAnalyses(
+ null,
analysis1,
analysis2,
false
@@ -406,6 +411,7 @@ public class SegmentMetadataQueryQueryToolChestTest
{
return SegmentMetadataQueryQueryToolChest.finalizeAnalysis(
SegmentMetadataQueryQueryToolChest.mergeAnalyses(
+ null,
analysis1,
analysis2,
true
diff --git
a/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataQueryTest.java
b/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataQueryTest.java
index ded726d583..0a93dd373a 100644
---
a/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataQueryTest.java
+++
b/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataQueryTest.java
@@ -69,6 +69,7 @@ import java.util.Collection;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
+import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutorService;
@@ -202,42 +203,44 @@ public class SegmentMetadataQueryTest extends InitializedNullHandlingTest
expectedSegmentAnalysis1 = new SegmentAnalysis(
id1.toString(),
ImmutableList.of(Intervals.of("2011-01-12T00:00:00.000Z/2011-04-15T00:00:00.001Z")),
- ImmutableMap.of(
- "__time",
- new ColumnAnalysis(
- ColumnType.LONG,
- ValueType.LONG.toString(),
- false,
- false,
- 12090,
- null,
- null,
- null,
- null
- ),
- "index",
- new ColumnAnalysis(
- ColumnType.DOUBLE,
- ValueType.DOUBLE.toString(),
- false,
- false,
- 9672,
- null,
- null,
- null,
- null
- ),
- "placement",
- new ColumnAnalysis(
- ColumnType.STRING,
- ValueType.STRING.toString(),
- false,
- false,
- preferedSize1,
- 1,
- "preferred",
- "preferred",
- null
+ new LinkedHashMap<>(
+ ImmutableMap.of(
+ "__time",
+ new ColumnAnalysis(
+ ColumnType.LONG,
+ ValueType.LONG.toString(),
+ false,
+ false,
+ 12090,
+ null,
+ null,
+ null,
+ null
+ ),
+ "index",
+ new ColumnAnalysis(
+ ColumnType.DOUBLE,
+ ValueType.DOUBLE.toString(),
+ false,
+ false,
+ 9672,
+ null,
+ null,
+ null,
+ null
+ ),
+ "placement",
+ new ColumnAnalysis(
+ ColumnType.STRING,
+ ValueType.STRING.toString(),
+ false,
+ false,
+ preferedSize1,
+ 1,
+ "preferred",
+ "preferred",
+ null
+ )
)
),
overallSize1,
@@ -250,42 +253,44 @@ public class SegmentMetadataQueryTest extends InitializedNullHandlingTest
expectedSegmentAnalysis2 = new SegmentAnalysis(
id2.toString(),
ImmutableList.of(Intervals.of("2011-01-12T00:00:00.000Z/2011-04-15T00:00:00.001Z")),
- ImmutableMap.of(
- "__time",
- new ColumnAnalysis(
- ColumnType.LONG,
- ValueType.LONG.toString(),
- false,
- false,
- 12090,
- null,
- null,
- null,
- null
- ),
- "index",
- new ColumnAnalysis(
- ColumnType.DOUBLE,
- ValueType.DOUBLE.toString(),
- false,
- false,
- 9672,
- null,
- null,
- null,
- null
- ),
- "placement",
- new ColumnAnalysis(
- ColumnType.STRING,
- ValueType.STRING.toString(),
- false,
- false,
- placementSize2,
- 1,
- null,
- null,
- null
+ new LinkedHashMap<>(
+ ImmutableMap.of(
+ "__time",
+ new ColumnAnalysis(
+ ColumnType.LONG,
+ ValueType.LONG.toString(),
+ false,
+ false,
+ 12090,
+ null,
+ null,
+ null,
+ null
+ ),
+ "index",
+ new ColumnAnalysis(
+ ColumnType.DOUBLE,
+ ValueType.DOUBLE.toString(),
+ false,
+ false,
+ 9672,
+ null,
+ null,
+ null,
+ null
+ ),
+ "placement",
+ new ColumnAnalysis(
+ ColumnType.STRING,
+ ValueType.STRING.toString(),
+ false,
+ false,
+ placementSize2,
+ 1,
+ null,
+ null,
+ null
+ )
)
),
// null_column will be included only for incremental index, which makes a little bigger result than expected
@@ -313,30 +318,32 @@ public class SegmentMetadataQueryTest extends InitializedNullHandlingTest
SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis(
differentIds ? "merged" : SegmentId.dummy("testSegment").toString(),
null,
- ImmutableMap.of(
- "placement",
- new ColumnAnalysis(
- ColumnType.STRING,
- ValueType.STRING.toString(),
- false,
- false,
- 0,
- 0,
- NullHandling.defaultStringValue(),
- NullHandling.defaultStringValue(),
- null
- ),
- "placementish",
- new ColumnAnalysis(
- ColumnType.STRING,
- ValueType.STRING.toString(),
- true,
- false,
- 0,
- 0,
- NullHandling.defaultStringValue(),
- NullHandling.defaultStringValue(),
- null
+ new LinkedHashMap<>(
+ ImmutableMap.of(
+ "placement",
+ new ColumnAnalysis(
+ ColumnType.STRING,
+ ValueType.STRING.toString(),
+ false,
+ false,
+ 0,
+ 0,
+ NullHandling.defaultStringValue(),
+ NullHandling.defaultStringValue(),
+ null
+ ),
+ "placementish",
+ new ColumnAnalysis(
+ ColumnType.STRING,
+ ValueType.STRING.toString(),
+ true,
+ false,
+ 0,
+ 0,
+ NullHandling.defaultStringValue(),
+ NullHandling.defaultStringValue(),
+ null
+ )
)
),
0,
@@ -385,30 +392,32 @@ public class SegmentMetadataQueryTest extends InitializedNullHandlingTest
SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis(
differentIds ? "merged" : SegmentId.dummy("testSegment").toString(),
null,
- ImmutableMap.of(
- "placement",
- new ColumnAnalysis(
- ColumnType.STRING,
- ValueType.STRING.toString(),
- false,
- false,
- 0,
- 1,
- NullHandling.defaultStringValue(),
- NullHandling.defaultStringValue(),
- null
- ),
- "placementish",
- new ColumnAnalysis(
- ColumnType.STRING,
- ValueType.STRING.toString(),
- true,
- false,
- 0,
- 9,
- NullHandling.defaultStringValue(),
- NullHandling.defaultStringValue(),
- null
+ new LinkedHashMap<>(
+ ImmutableMap.of(
+ "placement",
+ new ColumnAnalysis(
+ ColumnType.STRING,
+ ValueType.STRING.toString(),
+ false,
+ false,
+ 0,
+ 1,
+ NullHandling.defaultStringValue(),
+ NullHandling.defaultStringValue(),
+ null
+ ),
+ "placementish",
+ new ColumnAnalysis(
+ ColumnType.STRING,
+ ValueType.STRING.toString(),
+ true,
+ false,
+ 0,
+ 9,
+ NullHandling.defaultStringValue(),
+ NullHandling.defaultStringValue(),
+ null
+ )
)
),
0,
@@ -457,30 +466,32 @@ public class SegmentMetadataQueryTest extends InitializedNullHandlingTest
SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis(
differentIds ? "merged" : SegmentId.dummy("testSegment").toString(),
null,
- ImmutableMap.of(
- "placement",
- new ColumnAnalysis(
- ColumnType.STRING,
- ValueType.STRING.toString(),
- false,
- false,
- 0,
- 1,
- NullHandling.defaultStringValue(),
- NullHandling.defaultStringValue(),
- null
- ),
- "quality_uniques",
- new ColumnAnalysis(
- ColumnType.ofComplex("hyperUnique"),
- "hyperUnique",
- false,
- true,
- 0,
- null,
- null,
- null,
- null
+ new LinkedHashMap<>(
+ ImmutableMap.of(
+ "placement",
+ new ColumnAnalysis(
+ ColumnType.STRING,
+ ValueType.STRING.toString(),
+ false,
+ false,
+ 0,
+ 1,
+ NullHandling.defaultStringValue(),
+ NullHandling.defaultStringValue(),
+ null
+ ),
+ "quality_uniques",
+ new ColumnAnalysis(
+ ColumnType.ofComplex("hyperUnique"),
+ "hyperUnique",
+ false,
+ true,
+ 0,
+ null,
+ null,
+ null,
+ null
+ )
)
),
0,
@@ -600,33 +611,35 @@ public class SegmentMetadataQueryTest extends InitializedNullHandlingTest
SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis(
differentIds ? "merged" : SegmentId.dummy("testSegment").toString(),
ImmutableList.of(expectedSegmentAnalysis1.getIntervals().get(0)),
- ImmutableMap.of(
- "__time",
- new ColumnAnalysis(
- ColumnType.LONG,
- ValueType.LONG.toString(),
- false,
- false,
- 12090 * 2,
- null,
- null,
- null,
- null
- ),
- "index",
- new ColumnAnalysis(
- ColumnType.DOUBLE,
- ValueType.DOUBLE.toString(),
- false,
- false,
- 9672 * 2,
- null,
- null,
- null,
- null
- ),
- column,
- analysis
+ new LinkedHashMap<>(
+ ImmutableMap.of(
+ "__time",
+ new ColumnAnalysis(
+ ColumnType.LONG,
+ ValueType.LONG.toString(),
+ false,
+ false,
+ 12090 * 2,
+ null,
+ null,
+ null,
+ null
+ ),
+ "index",
+ new ColumnAnalysis(
+ ColumnType.DOUBLE,
+ ValueType.DOUBLE.toString(),
+ false,
+ false,
+ 9672 * 2,
+ null,
+ null,
+ null,
+ null
+ ),
+ column,
+ analysis
+ )
),
expectedSegmentAnalysis1.getSize() + expectedSegmentAnalysis2.getSize(),
expectedSegmentAnalysis1.getNumRows() + expectedSegmentAnalysis2.getNumRows(),
@@ -668,18 +681,20 @@ public class SegmentMetadataQueryTest extends InitializedNullHandlingTest
SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis(
differentIds ? "merged" : SegmentId.dummy("testSegment").toString(),
null,
- ImmutableMap.of(
- "placement",
- new ColumnAnalysis(
- ColumnType.STRING,
- ValueType.STRING.toString(),
- false,
- false,
- 0,
- 0,
- NullHandling.defaultStringValue(),
- NullHandling.defaultStringValue(),
- null
+ new LinkedHashMap<>(
+ ImmutableMap.of(
+ "placement",
+ new ColumnAnalysis(
+ ColumnType.STRING,
+ ValueType.STRING.toString(),
+ false,
+ false,
+ 0,
+ 0,
+ NullHandling.defaultStringValue(),
+ NullHandling.defaultStringValue(),
+ null
+ )
)
),
0,
@@ -732,18 +747,20 @@ public class SegmentMetadataQueryTest extends InitializedNullHandlingTest
SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis(
differentIds ? "merged" : SegmentId.dummy("testSegment").toString(),
null,
- ImmutableMap.of(
- "placement",
- new ColumnAnalysis(
- ColumnType.STRING,
- ValueType.STRING.toString(),
- false,
- false,
- 0,
- 0,
- NullHandling.defaultStringValue(),
- NullHandling.defaultStringValue(),
- null
+ new LinkedHashMap<>(
+ ImmutableMap.of(
+ "placement",
+ new ColumnAnalysis(
+ ColumnType.STRING,
+ ValueType.STRING.toString(),
+ false,
+ false,
+ 0,
+ 0,
+ NullHandling.defaultStringValue(),
+ NullHandling.defaultStringValue(),
+ null
+ )
)
),
0,
@@ -792,18 +809,20 @@ public class SegmentMetadataQueryTest extends InitializedNullHandlingTest
SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis(
differentIds ? "merged" : SegmentId.dummy("testSegment").toString(),
null,
- ImmutableMap.of(
- "placement",
- new ColumnAnalysis(
- ColumnType.STRING,
- ValueType.STRING.toString(),
- false,
- false,
- 0,
- 0,
- NullHandling.defaultStringValue(),
- NullHandling.defaultStringValue(),
- null
+ new LinkedHashMap<>(
+ ImmutableMap.of(
+ "placement",
+ new ColumnAnalysis(
+ ColumnType.STRING,
+ ValueType.STRING.toString(),
+ false,
+ false,
+ 0,
+ 0,
+ NullHandling.defaultStringValue(),
+ NullHandling.defaultStringValue(),
+ null
+ )
)
),
0,
@@ -852,18 +871,20 @@ public class SegmentMetadataQueryTest extends InitializedNullHandlingTest
SegmentAnalysis mergedSegmentAnalysis = new SegmentAnalysis(
differentIds ? "merged" : SegmentId.dummy("testSegment").toString(),
null,
- ImmutableMap.of(
- "placement",
- new ColumnAnalysis(
- ColumnType.STRING,
- ValueType.STRING.toString(),
- false,
- false,
- 0,
- 0,
- NullHandling.defaultStringValue(),
- NullHandling.defaultStringValue(),
- null
+ new LinkedHashMap<>(
+ ImmutableMap.of(
+ "placement",
+ new ColumnAnalysis(
+ ColumnType.STRING,
+ ValueType.STRING.toString(),
+ false,
+ false,
+ 0,
+ 0,
+ NullHandling.defaultStringValue(),
+ NullHandling.defaultStringValue(),
+ null
+ )
)
),
0,
@@ -937,7 +958,10 @@ public class SegmentMetadataQueryTest extends InitializedNullHandlingTest
TestHelper.assertExpectedObjects(
ImmutableList.of(bySegmentResult, bySegmentResult),
- myRunner.run(QueryPlus.wrap(testQuery.withOverriddenContext(ImmutableMap.of(QueryContexts.BY_SEGMENT_KEY, true)))),
+ myRunner.run(QueryPlus.wrap(testQuery.withOverriddenContext(ImmutableMap.of(
+ QueryContexts.BY_SEGMENT_KEY,
+ true
+ )))),
"failed SegmentMetadata bySegment query"
);
exec.shutdownNow();
@@ -1265,12 +1289,12 @@ public class SegmentMetadataQueryTest extends InitializedNullHandlingTest
.build();
final byte[] oneColumnQueryCacheKey = new SegmentMetadataQueryQueryToolChest(new SegmentMetadataQueryConfig()).getCacheStrategy(
- oneColumnQuery)
+ oneColumnQuery)
.computeCacheKey(
oneColumnQuery);
final byte[] twoColumnQueryCacheKey = new SegmentMetadataQueryQueryToolChest(new SegmentMetadataQueryConfig()).getCacheStrategy(
- twoColumnQuery)
+ twoColumnQuery)
.computeCacheKey(
twoColumnQuery);
diff --git
a/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataUnionQueryTest.java
b/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataUnionQueryTest.java
index ead3c9ff62..b29bfeb495 100644
---
a/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataUnionQueryTest.java
+++
b/processing/src/test/java/org/apache/druid/query/metadata/SegmentMetadataUnionQueryTest.java
@@ -44,6 +44,7 @@ import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.util.Collections;
+import java.util.LinkedHashMap;
import java.util.List;
@RunWith(Parameterized.class)
@@ -80,7 +81,7 @@ public class SegmentMetadataUnionQueryTest extends InitializedNullHandlingTest
null
),
true,
- },
+ },
new Object[]{
QueryRunnerTestHelper.makeUnionQueryRunner(
FACTORY,
@@ -99,18 +100,20 @@ public class SegmentMetadataUnionQueryTest extends InitializedNullHandlingTest
SegmentAnalysis expected = new SegmentAnalysis(
QueryRunnerTestHelper.SEGMENT_ID.toString(),
Collections.singletonList(Intervals.of("2011-01-12T00:00:00.000Z/2011-04-15T00:00:00.001Z")),
- ImmutableMap.of(
- "placement",
- new ColumnAnalysis(
- ColumnType.STRING,
- ValueType.STRING.toString(),
- false,
- false,
- mmap ? 43524 : 43056,
- 1,
- "preferred",
- "preferred",
- null
+ new LinkedHashMap<>(
+ ImmutableMap.of(
+ "placement",
+ new ColumnAnalysis(
+ ColumnType.STRING,
+ ValueType.STRING.toString(),
+ false,
+ false,
+ mmap ? 43524 : 43056,
+ 1,
+ "preferred",
+ "preferred",
+ null
+ )
)
),
mmap ? 805380 : 803324,
diff --git a/sql/src/main/java/org/apache/druid/sql/calcite/schema/DruidSchema.java b/sql/src/main/java/org/apache/druid/sql/calcite/schema/DruidSchema.java
index 785913b0a6..898489cce3 100644
--- a/sql/src/main/java/org/apache/druid/sql/calcite/schema/DruidSchema.java
+++ b/sql/src/main/java/org/apache/druid/sql/calcite/schema/DruidSchema.java
@@ -73,6 +73,7 @@ import java.io.IOException;
import java.util.Comparator;
import java.util.EnumSet;
import java.util.HashSet;
+import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
@@ -783,7 +784,9 @@ public class DruidSchema extends AbstractSchema
DruidTable buildDruidTable(final String dataSource)
{
ConcurrentSkipListMap<SegmentId, AvailableSegmentMetadata> segmentsMap = segmentMetadataInfo.get(dataSource);
- final Map<String, ColumnType> columnTypes = new TreeMap<>();
+
+ // Preserve order.
+ final Map<String, ColumnType> columnTypes = new LinkedHashMap<>();
if (segmentsMap != null && !segmentsMap.isEmpty()) {
for (AvailableSegmentMetadata availableSegmentMetadata : segmentsMap.values()) {
diff --git
a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java
b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java
index 5baa3de83a..0e7128b069 100644
---
a/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java
+++
b/sql/src/test/java/org/apache/druid/sql/avatica/DruidAvaticaHandlerTest.java
@@ -572,14 +572,6 @@ public abstract class DruidAvaticaHandlerTest extends CalciteTestBase
Pair.of("TYPE_NAME", "TIMESTAMP"),
Pair.of("IS_NULLABLE", "NO")
),
- row(
- Pair.of("TABLE_SCHEM", "druid"),
- Pair.of("TABLE_NAME", "foo"),
- Pair.of("COLUMN_NAME", "cnt"),
- Pair.of("DATA_TYPE", Types.BIGINT),
- Pair.of("TYPE_NAME", "BIGINT"),
- Pair.of("IS_NULLABLE", nullNumeric ? "YES" : "NO")
- ),
row(
Pair.of("TABLE_SCHEM", "druid"),
Pair.of("TABLE_NAME", "foo"),
@@ -604,6 +596,14 @@ public abstract class DruidAvaticaHandlerTest extends CalciteTestBase
Pair.of("TYPE_NAME", "VARCHAR"),
Pair.of("IS_NULLABLE", "YES")
),
+ row(
+ Pair.of("TABLE_SCHEM", "druid"),
+ Pair.of("TABLE_NAME", "foo"),
+ Pair.of("COLUMN_NAME", "cnt"),
+ Pair.of("DATA_TYPE", Types.BIGINT),
+ Pair.of("TYPE_NAME", "BIGINT"),
+ Pair.of("IS_NULLABLE", nullNumeric ? "YES" : "NO")
+ ),
row(
Pair.of("TABLE_SCHEM", "druid"),
Pair.of("TABLE_NAME", "foo"),
@@ -663,14 +663,6 @@ public abstract class DruidAvaticaHandlerTest extends CalciteTestBase
Pair.of("TYPE_NAME", "TIMESTAMP"),
Pair.of("IS_NULLABLE", "NO")
),
- row(
- Pair.of("TABLE_SCHEM", "druid"),
- Pair.of("TABLE_NAME", CalciteTests.FORBIDDEN_DATASOURCE),
- Pair.of("COLUMN_NAME", "cnt"),
- Pair.of("DATA_TYPE", Types.BIGINT),
- Pair.of("TYPE_NAME", "BIGINT"),
- Pair.of("IS_NULLABLE", nullNumeric ? "YES" : "NO")
- ),
row(
Pair.of("TABLE_SCHEM", "druid"),
Pair.of("TABLE_NAME", CalciteTests.FORBIDDEN_DATASOURCE),
@@ -687,6 +679,14 @@ public abstract class DruidAvaticaHandlerTest extends CalciteTestBase
Pair.of("TYPE_NAME", "VARCHAR"),
Pair.of("IS_NULLABLE", "YES")
),
+ row(
+ Pair.of("TABLE_SCHEM", "druid"),
+ Pair.of("TABLE_NAME", CalciteTests.FORBIDDEN_DATASOURCE),
+ Pair.of("COLUMN_NAME", "cnt"),
+ Pair.of("DATA_TYPE", Types.BIGINT),
+ Pair.of("TYPE_NAME", "BIGINT"),
+ Pair.of("IS_NULLABLE", nullNumeric ? "YES" : "NO")
+ ),
row(
Pair.of("TABLE_SCHEM", "druid"),
Pair.of("TABLE_NAME", CalciteTests.FORBIDDEN_DATASOURCE),
@@ -1184,22 +1184,22 @@ public abstract class DruidAvaticaHandlerTest extends CalciteTestBase
row(
Pair.of("TABLE_SCHEM", "druid"),
Pair.of("TABLE_NAME", CalciteTests.SOME_DATASOURCE),
- Pair.of("COLUMN_NAME", "cnt")
+ Pair.of("COLUMN_NAME", "dim1")
),
row(
Pair.of("TABLE_SCHEM", "druid"),
Pair.of("TABLE_NAME", CalciteTests.SOME_DATASOURCE),
- Pair.of("COLUMN_NAME", "dim1")
+ Pair.of("COLUMN_NAME", "dim2")
),
row(
Pair.of("TABLE_SCHEM", "druid"),
Pair.of("TABLE_NAME", CalciteTests.SOME_DATASOURCE),
- Pair.of("COLUMN_NAME", "dim2")
+ Pair.of("COLUMN_NAME", "dim3")
),
row(
Pair.of("TABLE_SCHEM", "druid"),
Pair.of("TABLE_NAME", CalciteTests.SOME_DATASOURCE),
- Pair.of("COLUMN_NAME", "dim3")
+ Pair.of("COLUMN_NAME", "cnt")
),
row(
Pair.of("TABLE_SCHEM", "druid"),
diff --git a/sql/src/test/java/org/apache/druid/sql/avatica/DruidStatementTest.java b/sql/src/test/java/org/apache/druid/sql/avatica/DruidStatementTest.java
index c04f429ff1..c6eeb83391 100644
--- a/sql/src/test/java/org/apache/druid/sql/avatica/DruidStatementTest.java
+++ b/sql/src/test/java/org/apache/druid/sql/avatica/DruidStatementTest.java
@@ -126,10 +126,10 @@ public class DruidStatementTest extends CalciteTestBase
Assert.assertEquals(
Lists.newArrayList(
Lists.newArrayList("__time", "TIMESTAMP", "java.lang.Long"),
- Lists.newArrayList("cnt", "BIGINT", "java.lang.Number"),
Lists.newArrayList("dim1", "VARCHAR", "java.lang.String"),
Lists.newArrayList("dim2", "VARCHAR", "java.lang.String"),
Lists.newArrayList("dim3", "VARCHAR", "java.lang.String"),
+ Lists.newArrayList("cnt", "BIGINT", "java.lang.Number"),
Lists.newArrayList("m1", "FLOAT", "java.lang.Float"),
Lists.newArrayList("m2", "DOUBLE", "java.lang.Double"),
Lists.newArrayList("unique_dim1", "OTHER", "java.lang.Object")
@@ -166,9 +166,7 @@ public class DruidStatementTest extends CalciteTestBase
true,
Lists.newArrayList(
new Object[]{""},
- new Object[]{
- "1"
- },
+ new Object[]{"1"},
new Object[]{"10.1"},
new Object[]{"2"},
new Object[]{"abc"},
diff --git a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteExplainQueryTest.java b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteExplainQueryTest.java
index f07f60980d..31246b4bca 100644
--- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteExplainQueryTest.java
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteExplainQueryTest.java
@@ -108,8 +108,8 @@ public class CalciteExplainQueryTest extends BaseCalciteQueryTest
+ ")";
final String legacyExplanation = "DruidOuterQueryRel(query=[{\"queryType\":\"groupBy\",\"dataSource\":{\"type\":\"query\",\"query\":{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"__subquery__\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"resultFormat\":\"list\",\"granularity\":{\"type\":\"all\"}}},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\ [...]
- + " DruidJoinQueryRel(condition=[=(SUBSTRING($3, 1, 1), $8)], joinType=[inner], query=[{\"queryType\":\"groupBy\",\"dataSource\":{\"type\":\"table\",\"name\":\"__join__\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"granularity\":{\"type\":\"all\"},\"dimensions\":[{\"type\":\"default\",\"dimension\":\"dim2\",\"outputName\":\"d0\",\"outputType\":\"STRING\"}],\"limitSpec\":{\"type\":\"NoopLimitSpec\"},\" [...]
- + " DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"resultFormat\":\"compactedList\",\"columns\":[\"__time\",\"cnt\",\"dim1\",\"dim2\",\"dim3\",\"m1\",\"m2\",\"unique_dim1\"],\"legacy\":false,\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-0 [...]
+ + " DruidJoinQueryRel(condition=[=(SUBSTRING($2, 1, 1), $8)], joinType=[inner], query=[{\"queryType\":\"groupBy\",\"dataSource\":{\"type\":\"table\",\"name\":\"__join__\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"granularity\":{\"type\":\"all\"},\"dimensions\":[{\"type\":\"default\",\"dimension\":\"dim2\",\"outputName\":\"d0\",\"outputType\":\"STRING\"}],\"limitSpec\":{\"type\":\"NoopLimitSpec\"},\" [...]
+ + " DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"resultFormat\":\"compactedList\",\"columns\":[\"__time\",\"cnt\",\"dim1\",\"dim2\",\"dim3\",\"m1\",\"m2\",\"unique_dim1\"],\"legacy\":false,\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-0 [...]
+ " DruidQueryRel(query=[{\"queryType\":\"groupBy\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"filter\":{\"type\":\"not\",\"field\":{\"type\":\"selector\",\"dimension\":\"dim1\",\"value\":null}},\"granularity\":{\"type\":\"all\"},\"dimensions\":[{\"type\":\"extraction\",\"dimension\":\"dim1\",\"outputName\":\"d0\",\"outputType\":\"STRING\",\"extra [...]
final String explanation = "["
+ "{\"query\":{\"queryType\":\"groupBy\","
@@ -153,8 +153,8 @@ public class CalciteExplainQueryTest extends BaseCalciteQueryTest
// Skip vectorization since otherwise the "context" will change for each subtest.
skipVectorize();
- String legacyExplanation = "DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"resultFormat\":\"compactedList\",\"columns\":[\"__time\",\"cnt\",\"dim1\",\"dim2\",\"dim3\",\"m1\",\"m2\",\"unique_dim1\"],\"legacy\":false,\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTim [...]
- String legacyExplanationWithContext = "DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"resultFormat\":\"compactedList\",\"columns\":[\"__time\",\"cnt\",\"dim1\",\"dim2\",\"dim3\",\"m1\",\"m2\",\"unique_dim1\"],\"legacy\":false,\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sq [...]
+ String legacyExplanation = "DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"resultFormat\":\"compactedList\",\"columns\":[\"__time\",\"cnt\",\"dim1\",\"dim2\",\"dim3\",\"m1\",\"m2\",\"unique_dim1\"],\"legacy\":false,\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTim [...]
+ String legacyExplanationWithContext = "DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"resultFormat\":\"compactedList\",\"columns\":[\"__time\",\"cnt\",\"dim1\",\"dim2\",\"dim3\",\"m1\",\"m2\",\"unique_dim1\"],\"legacy\":false,\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sq [...]
String explanation = "[{"
+ "\"query\":{\"queryType\":\"scan\","
+ "\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},"
@@ -164,7 +164,7 @@ public class CalciteExplainQueryTest extends BaseCalciteQueryTest
+ "\"legacy\":false,"
+ "\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\",\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"},"
+ "\"granularity\":{\"type\":\"all\"}},"
- + "\"signature\":[{\"name\":\"__time\",\"type\":\"LONG\"},{\"name\":\"cnt\",\"type\":\"LONG\"},{\"name\":\"dim1\",\"type\":\"STRING\"},{\"name\":\"dim2\",\"type\":\"STRING\"},{\"name\":\"dim3\",\"type\":\"STRING\"},{\"name\":\"m1\",\"type\":\"FLOAT\"},{\"name\":\"m2\",\"type\":\"DOUBLE\"},{\"name\":\"unique_dim1\",\"type\":\"COMPLEX<hyperUnique>\"}]"
+ + "\"signature\":[{\"name\":\"__time\",\"type\":\"LONG\"},{\"name\":\"dim1\",\"type\":\"STRING\"},{\"name\":\"dim2\",\"type\":\"STRING\"},{\"name\":\"dim3\",\"type\":\"STRING\"},{\"name\":\"cnt\",\"type\":\"LONG\"},{\"name\":\"m1\",\"type\":\"FLOAT\"},{\"name\":\"m2\",\"type\":\"DOUBLE\"},{\"name\":\"unique_dim1\",\"type\":\"COMPLEX<hyperUnique>\"}]"
+ "}]";
String explanationWithContext = "[{"
@@ -176,7 +176,7 @@ public class CalciteExplainQueryTest extends BaseCalciteQueryTest
+ "\"legacy\":false,"
+ "\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\",\"useNativeQueryExplain\":true,\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"},"
+ "\"granularity\":{\"type\":\"all\"}},"
- + "\"signature\":[{\"name\":\"__time\",\"type\":\"LONG\"},{\"name\":\"cnt\",\"type\":\"LONG\"},{\"name\":\"dim1\",\"type\":\"STRING\"},{\"name\":\"dim2\",\"type\":\"STRING\"},{\"name\":\"dim3\",\"type\":\"STRING\"},{\"name\":\"m1\",\"type\":\"FLOAT\"},{\"name\":\"m2\",\"type\":\"DOUBLE\"},{\"name\":\"unique_dim1\",\"type\":\"COMPLEX<hyperUnique>\"}]"
+ + "\"signature\":[{\"name\":\"__time\",\"type\":\"LONG\"},{\"name\":\"dim1\",\"type\":\"STRING\"},{\"name\":\"dim2\",\"type\":\"STRING\"},{\"name\":\"dim3\",\"type\":\"STRING\"},{\"name\":\"cnt\",\"type\":\"LONG\"},{\"name\":\"m1\",\"type\":\"FLOAT\"},{\"name\":\"m2\",\"type\":\"DOUBLE\"},{\"name\":\"unique_dim1\",\"type\":\"COMPLEX<hyperUnique>\"}]"
+ "}]";
String sql = "EXPLAIN PLAN FOR SELECT * FROM druid.foo";
String resources = "[{\"name\":\"foo\",\"type\":\"DATASOURCE\"}]";
diff --git
a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteIngestionDmlTest.java
b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteIngestionDmlTest.java
index 7040e7fc53..8200fa2dc1 100644
---
a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteIngestionDmlTest.java
+++
b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteIngestionDmlTest.java
@@ -70,10 +70,10 @@ public class CalciteIngestionDmlTest extends BaseCalciteQueryTest
protected static final RowSignature FOO_TABLE_SIGNATURE =
RowSignature.builder()
.addTimeColumn()
- .add("cnt", ColumnType.LONG)
.add("dim1", ColumnType.STRING)
.add("dim2", ColumnType.STRING)
.add("dim3", ColumnType.STRING)
+ .add("cnt", ColumnType.LONG)
.add("m1", ColumnType.FLOAT)
.add("m2", ColumnType.DOUBLE)
.add("unique_dim1", HyperUniquesAggregatorFactory.TYPE)
diff --git
a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java
b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java
index 15680504c6..77e6ca22d2 100644
--- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteQueryTest.java
@@ -282,10 +282,10 @@ public class CalciteQueryTest extends BaseCalciteQueryTest
ImmutableList.of(),
ImmutableList.of(
new Object[]{"__time", "TIMESTAMP", "NO"},
- new Object[]{"cnt", "BIGINT", useDefault ? "NO" : "YES"},
new Object[]{"dim1", "VARCHAR", "YES"},
new Object[]{"dim2", "VARCHAR", "YES"},
new Object[]{"dim3", "VARCHAR", "YES"},
+ new Object[]{"cnt", "BIGINT", useDefault ? "NO" : "YES"},
new Object[]{"m1", "FLOAT", useDefault ? "NO" : "YES"},
new Object[]{"m2", "DOUBLE", useDefault ? "NO" : "YES"},
new Object[]{"unique_dim1", "COMPLEX<hyperUnique>", "YES"}
@@ -313,9 +313,9 @@ public class CalciteQueryTest extends BaseCalciteQueryTest
ImmutableList.of(),
ImmutableList.of(
new Object[]{"__time", "TIMESTAMP", "NO"},
- new Object[]{"cnt", "BIGINT", useDefault ? "NO" : "YES"},
new Object[]{"dim1", "VARCHAR", "YES"},
new Object[]{"dim2", "VARCHAR", "YES"},
+ new Object[]{"cnt", "BIGINT", useDefault ? "NO" : "YES"},
new Object[]{"m1", "FLOAT", useDefault ? "NO" : "YES"},
new Object[]{"m2", "DOUBLE", useDefault ? "NO" : "YES"},
new Object[]{"unique_dim1", "COMPLEX<hyperUnique>", "YES"}
diff --git
a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteSelectQueryTest.java
b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteSelectQueryTest.java
index 310c2eb8e2..4f39dff99f 100644
--- a/sql/src/test/java/org/apache/druid/sql/calcite/CalciteSelectQueryTest.java
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/CalciteSelectQueryTest.java
@@ -592,9 +592,9 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
.build()
),
ImmutableList.of(
- new Object[]{timestamp("2000-01-01"), 1L, "", "a",
"[\"a\",\"b\"]", 1.0f, 1.0d, HLLC_STRING},
- new Object[]{timestamp("2001-01-01"), 1L, "1", "a", "", 4.0f,
4.0d, HLLC_STRING},
- new Object[]{timestamp("2001-01-02"), 1L, "def", "abc",
NULL_STRING, 5.0f, 5.0d, HLLC_STRING}
+ new Object[]{timestamp("2000-01-01"), "", "a", "[\"a\",\"b\"]",
1L, 1.0f, 1.0d, HLLC_STRING},
+ new Object[]{timestamp("2001-01-01"), "1", "a", "", 1L, 4.0f,
4.0d, HLLC_STRING},
+ new Object[]{timestamp("2001-01-02"), "def", "abc", NULL_STRING,
1L, 5.0f, 5.0d, HLLC_STRING}
)
);
}
@@ -1097,12 +1097,12 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
.build()
),
ImmutableList.of(
- new Object[]{timestamp("2000-01-01"), 1L, "", "a",
"[\"a\",\"b\"]", 1f, 1.0, HLLC_STRING},
- new Object[]{timestamp("2000-01-02"), 1L, "10.1", NULL_STRING,
"[\"b\",\"c\"]", 2f, 2.0, HLLC_STRING},
- new Object[]{timestamp("2000-01-03"), 1L, "2", "", "d", 3f, 3.0,
HLLC_STRING},
- new Object[]{timestamp("2001-01-01"), 1L, "1", "a", "", 4f, 4.0,
HLLC_STRING},
- new Object[]{timestamp("2001-01-02"), 1L, "def", "abc",
NULL_STRING, 5f, 5.0, HLLC_STRING},
- new Object[]{timestamp("2001-01-03"), 1L, "abc", NULL_STRING,
NULL_STRING, 6f, 6.0, HLLC_STRING}
+ new Object[]{timestamp("2000-01-01"), "", "a", "[\"a\",\"b\"]",
1L, 1f, 1.0, HLLC_STRING},
+ new Object[]{timestamp("2000-01-02"), "10.1", NULL_STRING,
"[\"b\",\"c\"]", 1L, 2f, 2.0, HLLC_STRING},
+ new Object[]{timestamp("2000-01-03"), "2", "", "d", 1L, 3f, 3.0,
HLLC_STRING},
+ new Object[]{timestamp("2001-01-01"), "1", "a", "", 1L, 4f, 4.0,
HLLC_STRING},
+ new Object[]{timestamp("2001-01-02"), "def", "abc", NULL_STRING,
1L, 5f, 5.0, HLLC_STRING},
+ new Object[]{timestamp("2001-01-03"), "abc", NULL_STRING,
NULL_STRING, 1L, 6f, 6.0, HLLC_STRING}
)
);
}
@@ -1131,18 +1131,18 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
ImmutableList.of(
new Object[]{
timestamp("2000-01-01"),
- 1L,
"forbidden",
"abcd",
+ 1L,
9999.0f,
NullHandling.defaultDoubleValue(),
"\"AQAAAQAAAALFBA==\""
},
new Object[]{
timestamp("2000-01-02"),
- 1L,
"forbidden",
"a",
+ 1L,
1234.0f,
NullHandling.defaultDoubleValue(),
"\"AQAAAQAAAALFBA==\""
@@ -1271,7 +1271,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
skipVectorize();
final String query = "EXPLAIN PLAN FOR SELECT * FROM druid.foo";
- final String legacyExplanation =
"DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"resultFormat\":\"compactedList\",\"columns\":[\"__time\",\"cnt\",\"dim1\",\"dim2\",\"dim3\",\"m1\",\"m2\",\"unique_dim1\"],\"legacy\":false,\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurr
[...]
+ final String legacyExplanation =
"DruidQueryRel(query=[{\"queryType\":\"scan\",\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},\"intervals\":{\"type\":\"intervals\",\"intervals\":[\"-146136543-09-08T08:23:32.096Z/146140482-04-24T15:36:27.903Z\"]},\"resultFormat\":\"compactedList\",\"columns\":[\"__time\",\"cnt\",\"dim1\",\"dim2\",\"dim3\",\"m1\",\"m2\",\"unique_dim1\"],\"legacy\":false,\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurr
[...]
final String explanation = "[{"
+ "\"query\":{\"queryType\":\"scan\","
+
"\"dataSource\":{\"type\":\"table\",\"name\":\"foo\"},"
@@ -1281,7 +1281,7 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
+ "\"legacy\":false,"
+
"\"context\":{\"defaultTimeout\":300000,\"maxScatterGatherBytes\":9223372036854775807,\"sqlCurrentTimestamp\":\"2000-01-01T00:00:00Z\",\"sqlQueryId\":\"dummy\",\"vectorize\":\"false\",\"vectorizeVirtualColumns\":\"false\"},"
+ "\"granularity\":{\"type\":\"all\"}},"
- +
"\"signature\":[{\"name\":\"__time\",\"type\":\"LONG\"},{\"name\":\"cnt\",\"type\":\"LONG\"},{\"name\":\"dim1\",\"type\":\"STRING\"},{\"name\":\"dim2\",\"type\":\"STRING\"},{\"name\":\"dim3\",\"type\":\"STRING\"},{\"name\":\"m1\",\"type\":\"FLOAT\"},{\"name\":\"m2\",\"type\":\"DOUBLE\"},{\"name\":\"unique_dim1\",\"type\":\"COMPLEX<hyperUnique>\"}]"
+ +
"\"signature\":[{\"name\":\"__time\",\"type\":\"LONG\"},{\"name\":\"dim1\",\"type\":\"STRING\"},{\"name\":\"dim2\",\"type\":\"STRING\"},{\"name\":\"dim3\",\"type\":\"STRING\"},{\"name\":\"cnt\",\"type\":\"LONG\"},{\"name\":\"m1\",\"type\":\"FLOAT\"},{\"name\":\"m2\",\"type\":\"DOUBLE\"},{\"name\":\"unique_dim1\",\"type\":\"COMPLEX<hyperUnique>\"}]"
+ "}]";
final String resources = "[{\"name\":\"foo\",\"type\":\"DATASOURCE\"}]";
@@ -1328,8 +1328,8 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
.build()
),
ImmutableList.of(
- new Object[]{timestamp("2000-01-01"), 1L, "", "a",
"[\"a\",\"b\"]", 1.0f, 1.0, HLLC_STRING},
- new Object[]{timestamp("2000-01-02"), 1L, "10.1", NULL_STRING,
"[\"b\",\"c\"]", 2.0f, 2.0, HLLC_STRING}
+ new Object[]{timestamp("2000-01-01"), "", "a", "[\"a\",\"b\"]",
1L, 1.0f, 1.0, HLLC_STRING},
+ new Object[]{timestamp("2000-01-02"), "10.1", NULL_STRING,
"[\"b\",\"c\"]", 1L, 2.0f, 2.0, HLLC_STRING}
)
);
}
@@ -1354,8 +1354,8 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
.build()
),
ImmutableList.of(
- new Object[]{timestamp("2000-01-02"), 1L, "10.1", NULL_STRING,
"[\"b\",\"c\"]", 2.0f, 2.0, HLLC_STRING},
- new Object[]{timestamp("2000-01-03"), 1L, "2", "", "d", 3f, 3.0,
HLLC_STRING}
+ new Object[]{timestamp("2000-01-02"), "10.1", NULL_STRING,
"[\"b\",\"c\"]", 1L, 2.0f, 2.0, HLLC_STRING},
+ new Object[]{timestamp("2000-01-03"), "2", "", "d", 1L, 3f, 3.0,
HLLC_STRING}
)
);
}
@@ -1429,8 +1429,8 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
.build()
),
ImmutableList.of(
- new Object[]{timestamp("2001-01-03"), 1L, "abc", NULL_STRING,
NULL_STRING, 6f, 6d, HLLC_STRING},
- new Object[]{timestamp("2001-01-02"), 1L, "def", "abc",
NULL_STRING, 5f, 5d, HLLC_STRING}
+ new Object[]{timestamp("2001-01-03"), "abc", NULL_STRING,
NULL_STRING, 1L, 6f, 6d, HLLC_STRING},
+ new Object[]{timestamp("2001-01-02"), "def", "abc", NULL_STRING,
1L, 5f, 5d, HLLC_STRING}
)
);
}
@@ -1455,12 +1455,12 @@ public class CalciteSelectQueryTest extends BaseCalciteQueryTest
.build()
),
ImmutableList.of(
- new Object[]{timestamp("2000-01-01"), 1L, "", "a",
"[\"a\",\"b\"]", 1f, 1.0, HLLC_STRING},
- new Object[]{timestamp("2000-01-02"), 1L, "10.1", NULL_STRING,
"[\"b\",\"c\"]", 2f, 2.0, HLLC_STRING},
- new Object[]{timestamp("2000-01-03"), 1L, "2", "", "d", 3f, 3.0,
HLLC_STRING},
- new Object[]{timestamp("2001-01-01"), 1L, "1", "a", "", 4f, 4.0,
HLLC_STRING},
- new Object[]{timestamp("2001-01-02"), 1L, "def", "abc",
NULL_STRING, 5f, 5.0, HLLC_STRING},
- new Object[]{timestamp("2001-01-03"), 1L, "abc", NULL_STRING,
NULL_STRING, 6f, 6.0, HLLC_STRING}
+ new Object[]{timestamp("2000-01-01"), "", "a", "[\"a\",\"b\"]",
1L, 1f, 1.0, HLLC_STRING},
+ new Object[]{timestamp("2000-01-02"), "10.1", NULL_STRING,
"[\"b\",\"c\"]", 1L, 2f, 2.0, HLLC_STRING},
+ new Object[]{timestamp("2000-01-03"), "2", "", "d", 1L, 3f, 3.0,
HLLC_STRING},
+ new Object[]{timestamp("2001-01-01"), "1", "a", "", 1L, 4f, 4.0,
HLLC_STRING},
+ new Object[]{timestamp("2001-01-02"), "def", "abc", NULL_STRING,
1L, 5f, 5.0, HLLC_STRING},
+ new Object[]{timestamp("2001-01-03"), "abc", NULL_STRING,
NULL_STRING, 1L, 6f, 6.0, HLLC_STRING}
)
);
}
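
Every expected Object[] row in this file shifts the same way: the 1L
count value moves from position 1 to position 4, since SELECT * results
are positional and now follow the preserved column order. A plain-JDK
sketch of the order-proof way to read such a row, resolving the
position by column name (hypothetical consumer code, not test code):

    import java.util.Arrays;
    import java.util.List;

    public class PositionalRowSketch
    {
      public static void main(String[] args)
      {
        // The new column order asserted throughout the tests above.
        List<String> columns = Arrays.asList(
            "__time", "dim1", "dim2", "dim3", "cnt", "m1", "m2", "unique_dim1");
        Object[] row = {
            "2000-01-01T00:00:00.000Z", "", "a", "[\"a\",\"b\"]", 1L, 1.0f, 1.0d, "HLLC"};

        // Hard-coding row[1] for the count breaks on reorder; a name lookup does not.
        System.out.println(row[columns.indexOf("cnt")]); // 1
      }
    }
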
diff --git
a/sql/src/test/java/org/apache/druid/sql/calcite/schema/DruidSchemaTest.java
b/sql/src/test/java/org/apache/druid/sql/calcite/schema/DruidSchemaTest.java
index 708ba50c4d..36e575e2bd 100644
--- a/sql/src/test/java/org/apache/druid/sql/calcite/schema/DruidSchemaTest.java
+++ b/sql/src/test/java/org/apache/druid/sql/calcite/schema/DruidSchemaTest.java
@@ -78,6 +78,7 @@ import java.io.File;
import java.io.IOException;
import java.util.EnumSet;
import java.util.HashSet;
+import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -206,7 +207,10 @@ public class DruidSchemaTest extends DruidSchemaTestCommon
CalciteTests.createMockQueryLifecycleFactory(walker, conglomerate),
serverView,
segmentManager,
- new MapJoinableFactory(ImmutableSet.of(globalTableJoinable),
ImmutableMap.of(globalTableJoinable.getClass(), GlobalTableDataSource.class)),
+ new MapJoinableFactory(
+ ImmutableSet.of(globalTableJoinable),
+ ImmutableMap.of(globalTableJoinable.getClass(),
GlobalTableDataSource.class)
+ ),
PLANNER_CONFIG_DEFAULT,
new NoopEscalator(),
new BrokerInternalQueryConfig(),
@@ -288,16 +292,16 @@ public class DruidSchemaTest extends DruidSchemaTestCommon
Assert.assertEquals("__time", fields.get(0).getName());
Assert.assertEquals(SqlTypeName.TIMESTAMP,
fields.get(0).getType().getSqlTypeName());
- Assert.assertEquals("cnt", fields.get(1).getName());
- Assert.assertEquals(SqlTypeName.BIGINT,
fields.get(1).getType().getSqlTypeName());
+ Assert.assertEquals("dim2", fields.get(1).getName());
+ Assert.assertEquals(SqlTypeName.VARCHAR,
fields.get(1).getType().getSqlTypeName());
- Assert.assertEquals("dim1", fields.get(2).getName());
- Assert.assertEquals(SqlTypeName.VARCHAR,
fields.get(2).getType().getSqlTypeName());
+ Assert.assertEquals("m1", fields.get(2).getName());
+ Assert.assertEquals(SqlTypeName.BIGINT,
fields.get(2).getType().getSqlTypeName());
- Assert.assertEquals("dim2", fields.get(3).getName());
+ Assert.assertEquals("dim1", fields.get(3).getName());
Assert.assertEquals(SqlTypeName.VARCHAR,
fields.get(3).getType().getSqlTypeName());
- Assert.assertEquals("m1", fields.get(4).getName());
+ Assert.assertEquals("cnt", fields.get(4).getName());
Assert.assertEquals(SqlTypeName.BIGINT,
fields.get(4).getType().getSqlTypeName());
Assert.assertEquals("unique_dim1", fields.get(5).getName());
@@ -1065,7 +1069,7 @@ public class DruidSchemaTest extends DruidSchemaTestCommon
new TableDataSource(segment.getDataSource()),
new MultipleSpecificSegmentSpec(
segmentIterable.stream()
-
.map(SegmentId::toDescriptor).collect(Collectors.toList())),
+
.map(SegmentId::toDescriptor).collect(Collectors.toList())),
new AllColumnIncluderator(),
false,
queryContext,
@@ -1094,7 +1098,8 @@ public class DruidSchemaTest extends DruidSchemaTestCommon
EasyMock.expect(factoryMock.factorize()).andReturn(lifecycleMock).once();
// This is the meat of the test, making sure that the query created by the method under test matches the expected query, specifically the operator-configured context
- EasyMock.expect(lifecycleMock.runSimple(expectedMetadataQuery,
AllowAllAuthenticator.ALLOW_ALL_RESULT, Access.OK)).andReturn(null);
+ EasyMock.expect(lifecycleMock.runSimple(expectedMetadataQuery,
AllowAllAuthenticator.ALLOW_ALL_RESULT, Access.OK))
+ .andReturn(null);
EasyMock.replay(factoryMock, lifecycleMock);
@@ -1107,36 +1112,28 @@ public class DruidSchemaTest extends DruidSchemaTestCommon
@Test
public void testSegmentMetadataColumnType()
{
+ // Verify order is preserved.
+ final LinkedHashMap<String, ColumnAnalysis> columns = new
LinkedHashMap<>();
+ columns.put(
+ "a",
+ new ColumnAnalysis(ColumnType.STRING,
ColumnType.STRING.asTypeString(), false, true, 1234, 26, "a", "z", null)
+ );
+
+ columns.put(
+ "count",
+ new ColumnAnalysis(ColumnType.LONG, ColumnType.LONG.asTypeString(),
false, true, 1234, 26, "a", "z", null)
+ );
+
+ columns.put(
+ "b",
+ new ColumnAnalysis(ColumnType.DOUBLE,
ColumnType.DOUBLE.asTypeString(), false, true, 1234, 26, null, null, null)
+ );
+
RowSignature signature = DruidSchema.analysisToRowSignature(
new SegmentAnalysis(
"id",
ImmutableList.of(Intervals.utc(1L, 2L)),
- ImmutableMap.of(
- "a",
- new ColumnAnalysis(
- ColumnType.STRING,
- ColumnType.STRING.asTypeString(),
- false,
- true,
- 1234,
- 26,
- "a",
- "z",
- null
- ),
- "count",
- new ColumnAnalysis(
- ColumnType.LONG,
- ColumnType.LONG.asTypeString(),
- false,
- true,
- 1234,
- 26,
- "a",
- "z",
- null
- )
- ),
+ columns,
1234,
100,
null,
@@ -1147,7 +1144,11 @@ public class DruidSchemaTest extends DruidSchemaTestCommon
);
Assert.assertEquals(
- RowSignature.builder().add("a", ColumnType.STRING).add("count",
ColumnType.LONG).build(),
+ RowSignature.builder()
+ .add("a", ColumnType.STRING)
+ .add("count", ColumnType.LONG)
+ .add("b", ColumnType.DOUBLE)
+ .build(),
signature
);
}
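
This assertion is the heart of the schema-side change: "b" lands after
"count" because the resulting signature follows the analysis map's
iteration order rather than re-sorting the names. A hedged sketch of
the pattern (not the actual analysisToRowSignature implementation),
assuming the Druid processing module on the classpath:

    import java.util.LinkedHashMap;
    import org.apache.druid.segment.column.ColumnType;
    import org.apache.druid.segment.column.RowSignature;

    public class AnalysisOrderSketch
    {
      public static void main(String[] args)
      {
        // Stand-in for a segment analysis column map: insertion-ordered.
        LinkedHashMap<String, ColumnType> columns = new LinkedHashMap<>();
        columns.put("a", ColumnType.STRING);
        columns.put("count", ColumnType.LONG);
        columns.put("b", ColumnType.DOUBLE);

        // Building in iteration order yields [a, count, b], not [a, b, count].
        RowSignature.Builder builder = RowSignature.builder();
        columns.forEach(builder::add);
        System.out.println(builder.build().getColumnNames());
      }
    }
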
@@ -1160,30 +1161,32 @@ public class DruidSchemaTest extends DruidSchemaTestCommon
new SegmentAnalysis(
"id",
ImmutableList.of(Intervals.utc(1L, 2L)),
- ImmutableMap.of(
- "a",
- new ColumnAnalysis(
- null,
- ColumnType.STRING.asTypeString(),
- false,
- true,
- 1234,
- 26,
- "a",
- "z",
- null
- ),
- "count",
- new ColumnAnalysis(
- null,
- ColumnType.LONG.asTypeString(),
- false,
- true,
- 1234,
- 26,
+ new LinkedHashMap<>(
+ ImmutableMap.of(
"a",
- "z",
- null
+ new ColumnAnalysis(
+ null,
+ ColumnType.STRING.asTypeString(),
+ false,
+ true,
+ 1234,
+ 26,
+ "a",
+ "z",
+ null
+ ),
+ "count",
+ new ColumnAnalysis(
+ null,
+ ColumnType.LONG.asTypeString(),
+ false,
+ true,
+ 1234,
+ 26,
+ "a",
+ "z",
+ null
+ )
)
),
1234,
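
One detail in the hunk above is worth calling out: Guava's
ImmutableMap.of() already iterates in construction order, so copying it
into a new LinkedHashMap<>(...) keeps the literal's order while making
the intended iteration order explicit in the test setup. A minimal
sketch, assuming Guava on the classpath:

    import com.google.common.collect.ImmutableMap;
    import java.util.LinkedHashMap;

    public class OrderedCopySketch
    {
      public static void main(String[] args)
      {
        // The copy preserves ImmutableMap's construction order: [a, count, b].
        LinkedHashMap<String, String> columns = new LinkedHashMap<>(
            ImmutableMap.of("a", "STRING", "count", "LONG", "b", "DOUBLE"));
        System.out.println(columns.keySet());
      }
    }
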
diff --git a/sql/src/test/java/org/apache/druid/sql/http/SqlResourceTest.java
b/sql/src/test/java/org/apache/druid/sql/http/SqlResourceTest.java
index 59a8d0b3e1..3219d3f87d 100644
--- a/sql/src/test/java/org/apache/druid/sql/http/SqlResourceTest.java
+++ b/sql/src/test/java/org/apache/druid/sql/http/SqlResourceTest.java
@@ -123,13 +123,13 @@ public class SqlResourceTest extends CalciteTestBase
private static final String DUMMY_SQL_QUERY_ID = "dummy";
private static final List<String> EXPECTED_COLUMNS_FOR_RESULT_FORMAT_TESTS =
- Arrays.asList("__time", "cnt", "dim1", "dim2", "dim3", "m1", "m2",
"unique_dim1", "EXPR$8");
+ Arrays.asList("__time", "dim1", "dim2", "dim3", "cnt", "m1", "m2",
"unique_dim1", "EXPR$8");
private static final List<String> EXPECTED_TYPES_FOR_RESULT_FORMAT_TESTS =
- Arrays.asList("LONG", "LONG", "STRING", "STRING", "STRING", "FLOAT",
"DOUBLE", "COMPLEX<hyperUnique>", "STRING");
+ Arrays.asList("LONG", "STRING", "STRING", "STRING", "LONG", "FLOAT",
"DOUBLE", "COMPLEX<hyperUnique>", "STRING");
private static final List<String> EXPECTED_SQL_TYPES_FOR_RESULT_FORMAT_TESTS
=
- Arrays.asList("TIMESTAMP", "BIGINT", "VARCHAR", "VARCHAR", "VARCHAR",
"FLOAT", "DOUBLE", "OTHER", "VARCHAR");
+ Arrays.asList("TIMESTAMP", "VARCHAR", "VARCHAR", "VARCHAR", "BIGINT",
"FLOAT", "DOUBLE", "OTHER", "VARCHAR");
private static QueryRunnerFactoryConglomerate conglomerate;
private static Closer resourceCloser;
@@ -544,10 +544,10 @@ public class SqlResourceTest extends CalciteTestBase
ImmutableList.of(
Arrays.asList(
"2000-01-01T00:00:00.000Z",
- 1,
"",
"a",
"[\"a\",\"b\"]",
+ 1,
1.0,
1.0,
"org.apache.druid.hll.VersionOneHyperLogLogCollector",
@@ -555,10 +555,10 @@ public class SqlResourceTest extends CalciteTestBase
),
Arrays.asList(
"2000-01-02T00:00:00.000Z",
- 1,
"10.1",
nullStr,
"[\"b\",\"c\"]",
+ 1,
2.0,
2.0,
"org.apache.druid.hll.VersionOneHyperLogLogCollector",
@@ -655,10 +655,10 @@ public class SqlResourceTest extends CalciteTestBase
EXPECTED_SQL_TYPES_FOR_RESULT_FORMAT_TESTS,
Arrays.asList(
"2000-01-01T00:00:00.000Z",
- 1,
"",
"a",
"[\"a\",\"b\"]",
+ 1,
1.0,
1.0,
"org.apache.druid.hll.VersionOneHyperLogLogCollector",
@@ -666,10 +666,10 @@ public class SqlResourceTest extends CalciteTestBase
),
Arrays.asList(
"2000-01-02T00:00:00.000Z",
- 1,
"10.1",
nullStr,
"[\"b\",\"c\"]",
+ 1,
2.0,
2.0,
"org.apache.druid.hll.VersionOneHyperLogLogCollector",
@@ -723,10 +723,10 @@ public class SqlResourceTest extends CalciteTestBase
Assert.assertEquals(
Arrays.asList(
"2000-01-01T00:00:00.000Z",
- 1,
"",
"a",
"[\"a\",\"b\"]",
+ 1,
1.0,
1.0,
"org.apache.druid.hll.VersionOneHyperLogLogCollector",
@@ -737,10 +737,10 @@ public class SqlResourceTest extends CalciteTestBase
Assert.assertEquals(
Arrays.asList(
"2000-01-02T00:00:00.000Z",
- 1,
"10.1",
nullStr,
"[\"b\",\"c\"]",
+ 1,
2.0,
2.0,
"org.apache.druid.hll.VersionOneHyperLogLogCollector",
@@ -771,10 +771,10 @@ public class SqlResourceTest extends CalciteTestBase
Assert.assertEquals(
Arrays.asList(
"2000-01-01T00:00:00.000Z",
- 1,
"",
"a",
"[\"a\",\"b\"]",
+ 1,
1.0,
1.0,
"org.apache.druid.hll.VersionOneHyperLogLogCollector",
@@ -785,10 +785,10 @@ public class SqlResourceTest extends CalciteTestBase
Assert.assertEquals(
Arrays.asList(
"2000-01-02T00:00:00.000Z",
- 1,
"10.1",
nullStr,
"[\"b\",\"c\"]",
+ 1,
2.0,
2.0,
"org.apache.druid.hll.VersionOneHyperLogLogCollector",
@@ -1099,8 +1099,8 @@ public class SqlResourceTest extends CalciteTestBase
Assert.assertEquals(
ImmutableList.of(
-
"2000-01-01T00:00:00.000Z,1,,a,\"[\"\"a\"\",\"\"b\"\"]\",1.0,1.0,org.apache.druid.hll.VersionOneHyperLogLogCollector,",
-
"2000-01-02T00:00:00.000Z,1,10.1,,\"[\"\"b\"\",\"\"c\"\"]\",2.0,2.0,org.apache.druid.hll.VersionOneHyperLogLogCollector,",
+
"2000-01-01T00:00:00.000Z,,a,\"[\"\"a\"\",\"\"b\"\"]\",1,1.0,1.0,org.apache.druid.hll.VersionOneHyperLogLogCollector,",
+
"2000-01-02T00:00:00.000Z,10.1,,\"[\"\"b\"\",\"\"c\"\"]\",1,2.0,2.0,org.apache.druid.hll.VersionOneHyperLogLogCollector,",
"",
""
),
@@ -1124,8 +1124,8 @@ public class SqlResourceTest extends CalciteTestBase
String.join(",", EXPECTED_COLUMNS_FOR_RESULT_FORMAT_TESTS),
String.join(",", EXPECTED_TYPES_FOR_RESULT_FORMAT_TESTS),
String.join(",", EXPECTED_SQL_TYPES_FOR_RESULT_FORMAT_TESTS),
-
"2000-01-01T00:00:00.000Z,1,,a,\"[\"\"a\"\",\"\"b\"\"]\",1.0,1.0,org.apache.druid.hll.VersionOneHyperLogLogCollector,",
-
"2000-01-02T00:00:00.000Z,1,10.1,,\"[\"\"b\"\",\"\"c\"\"]\",2.0,2.0,org.apache.druid.hll.VersionOneHyperLogLogCollector,",
+
"2000-01-01T00:00:00.000Z,,a,\"[\"\"a\"\",\"\"b\"\"]\",1,1.0,1.0,org.apache.druid.hll.VersionOneHyperLogLogCollector,",
+
"2000-01-02T00:00:00.000Z,10.1,,\"[\"\"b\"\",\"\"c\"\"]\",1,2.0,2.0,org.apache.druid.hll.VersionOneHyperLogLogCollector,",
"",
""
),
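
The CSV expectations are derived from the same three
EXPECTED_*_FOR_RESULT_FORMAT_TESTS lists, so the column move surfaces
only as the count value shifting within each joined line. A plain-JDK
sketch mirroring the String.join calls the test uses for its header
rows:

    import java.util.Arrays;
    import java.util.List;

    public class CsvHeaderSketch
    {
      public static void main(String[] args)
      {
        List<String> columns = Arrays.asList(
            "__time", "dim1", "dim2", "dim3", "cnt", "m1", "m2",
            "unique_dim1", "EXPR$8");
        List<String> sqlTypes = Arrays.asList(
            "TIMESTAMP", "VARCHAR", "VARCHAR", "VARCHAR", "BIGINT",
            "FLOAT", "DOUBLE", "OTHER", "VARCHAR");

        // Two of the three header lines each CSV test asserts via String.join.
        System.out.println(String.join(",", columns));
        System.out.println(String.join(",", sqlTypes));
      }
    }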