imply-cheddar commented on code in PR #15096: URL: https://github.com/apache/druid/pull/15096#discussion_r1348356518
########## sql/src/test/java/org/apache/druid/sql/calcite/DisableUnless.java: ########## @@ -0,0 +1,55 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.druid.sql.calcite; + +import com.google.common.base.Supplier; +import org.apache.druid.common.config.NullHandling; +import org.junit.rules.TestRule; +import org.junit.runner.Description; +import org.junit.runners.model.Statement; + +import static org.junit.Assume.assumeTrue; + +/** + * Collection of conditional disabler rules. 
+ */ +class DisableUnless +{ + public static final TestRule SQL_COMPATIBLE = new DisableUnlessRule("NullHandling::sqlCompatible", NullHandling::sqlCompatible); + + public static class DisableUnlessRule implements TestRule + { + private Supplier<Boolean> predicate; + private String message; + + public DisableUnlessRule(String message, Supplier<Boolean> predicate) + { + this.message = message; + this.predicate = predicate; + } + + @Override + public Statement apply(Statement base, Description description) + { + assumeTrue("Testcase disabled; because condition not net: " + message, predicate.get()); Review Comment: s/net/met/ ########## sql/src/main/java/org/apache/druid/sql/calcite/planner/CalciteRulesManager.java: ########## @@ -233,7 +233,13 @@ public List<Program> programs(final PlannerContext plannerContext) boolean isDebug = plannerContext.queryContext().isDebug(); return ImmutableList.of( - Programs.sequence(preProgram, Programs.ofRules(druidConventionRuleSet(plannerContext))), + Programs.sequence( + new LoggingProgram("Start", isDebug), + preProgram, + new LoggingProgram("After preProgram", isDebug), + Programs.ofRules(druidConventionRuleSet(plannerContext)), + new LoggingProgram("After volcano planner program", isDebug) + ), Review Comment: I'm guessing you added this for debugging, I suspect you intended to remove before merge. ########## sql/src/test/java/org/apache/druid/sql/calcite/BaseCalciteQueryTest.java: ########## @@ -1056,7 +1056,7 @@ public Map<String, Object> baseQueryContext() public void assertResultsEquals(String sql, List<Object[]> expectedResults, List<Object[]> results) { - for (int i = 0; i < results.size(); i++) { + for (int i = 0; i < Math.min(results.size(), expectedResults.size()); i++) { Review Comment: totally crazy stream-of-consciousness nit: I don't think that the JVM can optimize the conditional here as the various lists *could* have their sizes change. 
If you just use your IDE to extract this as a variable and then use that, it doesn't have to re-evaluate it. I doubt this would actually show up in profiling as consuming time, but still... ########## sql/src/test/java/org/apache/druid/sql/calcite/NegativeTest.java: ########## @@ -36,23 +37,52 @@ import static org.junit.Assert.assertThrows; /** - * Can be used to mark tests which are not-yet supported in decoupled mode. + * Can be used to mark tests which are not-yet supported for some reason. + * + * In case a testcase marked with this annotation fails - it means that the + * testcase no longer fails with the annotated expectation. Review Comment: Suggestion: ... no longer fails with the annotated expectation. This means that a code change affected this test either 1) it suddenly passes: yay, assuming it makes sense that it suddenly passes, remove the annotation and move on 2) it suddenly fails with a different error: validate that the new error is expected and either fix to continue failing with the old error or update the expected error. 
########## sql/src/test/java/org/apache/druid/sql/calcite/DrillWindowQueryTest.java: ########## @@ -142,65 +266,220 @@ public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker( new LongDimensionSchema("col6"), new StringDimensionSchema("col7"), new StringDimensionSchema("col8"), - new StringDimensionSchema("col9") - ); + new StringDimensionSchema("col9")); + attachIndex( + retVal, + "smlTbl.parquet", + // "col_int": 8122, + new LongDimensionSchema("col_int"), + // "col_bgint": 817200, + new LongDimensionSchema("col_bgint"), + // "col_char_2": "IN", + new StringDimensionSchema("col_char_2"), + // "col_vchar_52": + // "AXXXXXXXXXXXXXXXXXXXXXXXXXCXXXXXXXXXXXXXXXXXXXXXXXXB", + new StringDimensionSchema("col_vchar_52"), + // "col_tmstmp": 1409617682418, + new LongDimensionSchema("col_tmstmp"), + // "col_dt": 422717616000000, + new LongDimensionSchema("col_dt"), + // "col_booln": false, + new StringDimensionSchema("col_booln"), + // "col_dbl": 12900.48, + new DoubleDimensionSchema("col_dbl"), + // "col_tm": 33109170 + new LongDimensionSchema("col_tm")); + attachIndex( + retVal, + "fewRowsAllData.parquet", + // "col0":12024, + new LongDimensionSchema("col0"), + // "col1":307168, + new LongDimensionSchema("col1"), + // "col2":"VT", + new StringDimensionSchema("col2"), + // "col3":"DXXXXXXXXXXXXXXXXXXXXXXXXXEXXXXXXXXXXXXXXXXXXXXXXXXF", + new StringDimensionSchema("col3"), + // "col4":1338596882419, + new LongDimensionSchema("col4"), + // "col5":422705433600000, + new LongDimensionSchema("col5"), + // "col6":true, + new StringDimensionSchema("col6"), + // "col7":3.95110006277E8, + new DoubleDimensionSchema("col7"), + // "col8":67465430 + new LongDimensionSchema("col8")); + attachIndex( + retVal, + "t_alltype.parquet", + // "c1":1, + new LongDimensionSchema("c1"), + // "c2":592475043, + new LongDimensionSchema("c2"), + // "c3":616080519999272, + new LongDimensionSchema("c3"), + // "c4":"ObHeWTDEcbGzssDwPwurfs", + new StringDimensionSchema("c4"), + // "c5":"0sZxIfZ 
CGwTOaLWZ6nWkUNx", + new StringDimensionSchema("c5"), + // "c6":1456290852307, + new LongDimensionSchema("c6"), + // "c7":421426627200000, + new LongDimensionSchema("c7"), + // "c8":true, + new StringDimensionSchema("c8"), + // "c9":0.626179100469 + new DoubleDimensionSchema("c9")); return retVal; } - @Test + public class TextualResultsVerifier implements ResultsVerifier + { + protected final List<String[]> expectedResultsText; + @Nullable + protected final RowSignature expectedResultRowSignature; + private RowSignature currentRowSignature; + + public TextualResultsVerifier(List<String[]> expectedResultsString, RowSignature expectedSignature) + { + this.expectedResultsText = expectedResultsString; + this.expectedResultRowSignature = expectedSignature; + } + + @Override + public void verifyRowSignature(RowSignature rowSignature) + { + if (expectedResultRowSignature != null) { + Assert.assertEquals(expectedResultRowSignature, rowSignature); + } + currentRowSignature = rowSignature; + } + + @Override + public void verify(String sql, List<Object[]> results) + { + List<Object[]> expectedResults = parseResults(currentRowSignature, expectedResultsText); + try { + Assert.assertEquals(StringUtils.format("result count: %s", sql), expectedResultsText.size(), results.size()); + if (!isOrdered(sql)) { + results.sort(new ArrayRowCmp()); + expectedResults.sort(new ArrayRowCmp()); + } else { + assertResultsEquals(sql, expectedResults, results); + } + } + catch (AssertionError e) { + displayResults(expectedResults); + System.out.println("query: " + sql); + displayResults(results); Review Comment: I'm pretty sure one of the robots is gonna yell at you for `System.out.println` ########## sql/src/test/java/org/apache/druid/sql/calcite/DrillWindowQueryTest.java: ########## @@ -142,65 +266,220 @@ public SpecificSegmentsQuerySegmentWalker createQuerySegmentWalker( new LongDimensionSchema("col6"), new StringDimensionSchema("col7"), new StringDimensionSchema("col8"), - new 
StringDimensionSchema("col9") - ); + new StringDimensionSchema("col9")); + attachIndex( + retVal, + "smlTbl.parquet", + // "col_int": 8122, + new LongDimensionSchema("col_int"), + // "col_bgint": 817200, + new LongDimensionSchema("col_bgint"), + // "col_char_2": "IN", + new StringDimensionSchema("col_char_2"), + // "col_vchar_52": + // "AXXXXXXXXXXXXXXXXXXXXXXXXXCXXXXXXXXXXXXXXXXXXXXXXXXB", + new StringDimensionSchema("col_vchar_52"), + // "col_tmstmp": 1409617682418, + new LongDimensionSchema("col_tmstmp"), + // "col_dt": 422717616000000, + new LongDimensionSchema("col_dt"), + // "col_booln": false, + new StringDimensionSchema("col_booln"), + // "col_dbl": 12900.48, + new DoubleDimensionSchema("col_dbl"), + // "col_tm": 33109170 + new LongDimensionSchema("col_tm")); + attachIndex( + retVal, + "fewRowsAllData.parquet", + // "col0":12024, + new LongDimensionSchema("col0"), + // "col1":307168, + new LongDimensionSchema("col1"), + // "col2":"VT", + new StringDimensionSchema("col2"), + // "col3":"DXXXXXXXXXXXXXXXXXXXXXXXXXEXXXXXXXXXXXXXXXXXXXXXXXXF", + new StringDimensionSchema("col3"), + // "col4":1338596882419, + new LongDimensionSchema("col4"), + // "col5":422705433600000, + new LongDimensionSchema("col5"), + // "col6":true, + new StringDimensionSchema("col6"), + // "col7":3.95110006277E8, + new DoubleDimensionSchema("col7"), + // "col8":67465430 + new LongDimensionSchema("col8")); + attachIndex( + retVal, + "t_alltype.parquet", + // "c1":1, + new LongDimensionSchema("c1"), + // "c2":592475043, + new LongDimensionSchema("c2"), + // "c3":616080519999272, + new LongDimensionSchema("c3"), + // "c4":"ObHeWTDEcbGzssDwPwurfs", + new StringDimensionSchema("c4"), + // "c5":"0sZxIfZ CGwTOaLWZ6nWkUNx", + new StringDimensionSchema("c5"), + // "c6":1456290852307, + new LongDimensionSchema("c6"), + // "c7":421426627200000, + new LongDimensionSchema("c7"), + // "c8":true, + new StringDimensionSchema("c8"), + // "c9":0.626179100469 + new DoubleDimensionSchema("c9")); return 
retVal; } - @Test + public class TextualResultsVerifier implements ResultsVerifier + { + protected final List<String[]> expectedResultsText; + @Nullable + protected final RowSignature expectedResultRowSignature; + private RowSignature currentRowSignature; + + public TextualResultsVerifier(List<String[]> expectedResultsString, RowSignature expectedSignature) + { + this.expectedResultsText = expectedResultsString; + this.expectedResultRowSignature = expectedSignature; + } + + @Override + public void verifyRowSignature(RowSignature rowSignature) + { + if (expectedResultRowSignature != null) { + Assert.assertEquals(expectedResultRowSignature, rowSignature); + } + currentRowSignature = rowSignature; + } + + @Override + public void verify(String sql, List<Object[]> results) + { + List<Object[]> expectedResults = parseResults(currentRowSignature, expectedResultsText); + try { + Assert.assertEquals(StringUtils.format("result count: %s", sql), expectedResultsText.size(), results.size()); + if (!isOrdered(sql)) { + results.sort(new ArrayRowCmp()); + expectedResults.sort(new ArrayRowCmp()); + } else { + assertResultsEquals(sql, expectedResults, results); + } + } + catch (AssertionError e) { + displayResults(expectedResults); + System.out.println("query: " + sql); + displayResults(results); + throw e; + } + } + + private boolean isOrdered(String sql) + { + // FIXME: SqlToRelConverter.isOrdered(null) would be better + sql = sql.toLowerCase(Locale.ENGLISH).replace('\n', ' '); + sql = sql.substring(sql.lastIndexOf(')')); + return sql.contains("order"); + } + } + + static class ArrayRowCmp implements Comparator<Object[]> + { + @Override + public int compare(Object[] arg0, Object[] arg1) + { + String s0 = Arrays.toString(arg0); + String s1 = Arrays.toString(arg1); + return s0.compareTo(s1); + } + } + + private static List<Object[]> parseResults(RowSignature rs, List<String[]> results) + { + Predicate<String> longPattern = Pattern.compile("^-?[0-9]+$").asPredicate(); + 
Predicate<String> doublePattern = Pattern.compile("^-?[0-9]+\\.[0-9]+$").asPredicate(); + List<Object[]> ret = new ArrayList<>(); + for (String[] row : results) { + Object[] newRow = new Object[row.length]; + List<String> cc = rs.getColumnNames(); + for (int i = 0; i < cc.size(); i++) { + ColumnType type = rs.getColumnType(i).get(); + assertNull(type.getComplexTypeName()); + final String val = row[i]; + Object newVal; + if ("null".equals(val)) { + newVal = null; + } else { + switch (type.getType()) { + case STRING: + newVal = val; + break; + case LONG: + if ("".equals(val)) { + newVal = null; + } else if (longPattern.test(val)) { + newVal = Numbers.parseLong(val); + } else if ("0E-20".equals(val)) { + newVal = 0L; + } else if (doublePattern.test(val)) { + double d = Doubles.tryParse(val); + newVal = (long) d; + } else { + Function<String, DateTime> parser = TimestampParser.createTimestampParser("auto"); + try { + newVal = parser.apply(val); + } + catch (IllegalArgumentException iae) { + LocalTime v = LocalTime.parse(val); + newVal = v.getMillisOfDay(); + } + } + break; + case DOUBLE: + newVal = Numbers.parseDoubleObject(val); + break; + default: + throw new RuntimeException("unimplemented"); + } + } + newRow[i] = newVal; + } + ret.add(newRow); + } + return ret; + } + public void windowQueryTest() { Thread thread = null; String oldName = null; try { thread = Thread.currentThread(); oldName = thread.getName(); - thread.setName("drillWindowQuery-" + filename); - - final String query = getQueryString(); - final String results = getExpectedResults(); + thread.setName("drillWindowQuery-" + testCase.filename); testBuilder() .skipVectorize(true) - .sql(query) - .queryContext(ImmutableMap.of("windowsAreForClosers", true, "windowsAllTheWayDown", true)) - .expectedResults((sql, results1) -> Assert.assertEquals(results, String.valueOf(results1))) + .queryContext(ImmutableMap.of( + PlannerContext.CTX_ENABLE_WINDOW_FNS, true, + "windowsAllTheWayDown", true, + 
QueryContexts.ENABLE_DEBUG, true, + PlannerConfig.CTX_KEY_USE_APPROXIMATE_COUNT_DISTINCT, false)) Review Comment: Why did we have to turn off approx count distinct for these tests? ########## sql/src/test/java/org/apache/druid/sql/calcite/DrillWindowQueryTest.java: ########## @@ -36,98 +37,221 @@ import org.apache.druid.data.input.impl.StringDimensionSchema; import org.apache.druid.jackson.DefaultObjectMapper; import org.apache.druid.java.util.common.Intervals; +import org.apache.druid.java.util.common.Numbers; import org.apache.druid.java.util.common.RE; -import org.apache.druid.java.util.common.logger.Logger; +import org.apache.druid.java.util.common.StringUtils; +import org.apache.druid.java.util.common.parsers.TimestampParser; +import org.apache.druid.query.QueryContexts; import org.apache.druid.query.QueryRunnerFactoryConglomerate; import org.apache.druid.segment.IndexBuilder; import org.apache.druid.segment.QueryableIndex; +import org.apache.druid.segment.column.ColumnType; +import org.apache.druid.segment.column.RowSignature; import org.apache.druid.segment.incremental.IncrementalIndexSchema; import org.apache.druid.segment.join.JoinableFactoryWrapper; import org.apache.druid.segment.writeout.OnHeapMemorySegmentWriteOutMediumFactory; +import org.apache.druid.sql.calcite.NegativeTest.Modes; +import org.apache.druid.sql.calcite.NegativeTest.NegativeTestProcessor; +import org.apache.druid.sql.calcite.planner.PlannerConfig; +import org.apache.druid.sql.calcite.planner.PlannerContext; import org.apache.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker; import org.apache.druid.timeline.DataSegment; import org.apache.druid.timeline.partition.NumberedShardSpec; +import org.joda.time.DateTime; +import org.joda.time.LocalTime; import org.junit.Assert; +import org.junit.Rule; import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.rules.TestRule; +import org.junit.runner.Description; +import 
org.junit.runners.model.Statement; import javax.annotation.Nonnull; +import javax.annotation.Nullable; + import java.io.File; import java.io.IOException; import java.io.InputStream; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import java.lang.reflect.Method; import java.net.URL; import java.nio.charset.StandardCharsets; +import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; +import java.util.Comparator; +import java.util.List; +import java.util.Locale; import java.util.Map; +import java.util.Set; +import java.util.function.Predicate; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.fail; /** - * These test cases are borrowed from the drill-test-framework at https://github.com/apache/drill-test-framework + * These test cases are borrowed from the drill-test-framework at + * https://github.com/apache/drill-test-framework * <p> - * The Drill data sources are just accessing parquet files directly, we ingest and index the data first via JSON - * (so that we avoid pulling in the parquet dependencies here) and then run the queries over that. + * The Drill data sources are just accessing parquet files directly, we ingest + * and index the data first via JSON (so that we avoid pulling in the parquet + * dependencies here) and then run the queries over that. * <p> - * The setup of the ingestion is done via code in this class, so any new data source needs to be added through that - * manner. That said, these tests are primarily being added to bootstrap our own test coverage of window - * functions, so it is believed that most iteration on tests will happen through the CalciteWindowQueryTest - * instead of this class. 
+ * The setup of the ingestion is done via code in this class, so any new data + * source needs to be added through that manner. That said, these tests are + * primarily being added to bootstrap our own test coverage of window functions, + * so it is believed that most iteration on tests will happen through the + * CalciteWindowQueryTest instead of this class. */ -@RunWith(Parameterized.class) public class DrillWindowQueryTest extends BaseCalciteQueryTest { - private static final Logger log = new Logger(DrillWindowQueryTest.class); + private static final ObjectMapper MAPPER = new DefaultObjectMapper(); + private DrillTestCase testCase = null; static { NullHandling.initializeForTests(); } - @Parameterized.Parameters(name = "{0}") - public static Object parametersForWindowQueryTest() throws Exception + @Rule + public TestRule disableWhenNonSqlCompat = DisableUnless.SQL_COMPATIBLE; + + @Rule + public NegativeTestProcessor ignoreProcessor = new NegativeTestProcessor(); + + @Rule + public DrillTestCaseLoaderRule drillTestCaseRule = new DrillTestCaseLoaderRule(); + + @Test + public void ensureAllDeclared() throws Exception { final URL windowQueriesUrl = ClassLoader.getSystemResource("drill/window/queries/"); - File windowFolder = new File(windowQueriesUrl.toURI()); - int windowFolderPrefixSize = windowFolder.getAbsolutePath().length() + 1 /* 1 for the ending slash */; + Path windowFolder = new File(windowQueriesUrl.toURI()).toPath(); - return FileUtils - .streamFiles(windowFolder, true, "q") + Set<String> allCases = FileUtils + .streamFiles(windowFolder.toFile(), true, "q") .map(file -> { - final String filePath = file.getAbsolutePath(); - return filePath.substring(windowFolderPrefixSize, filePath.length() - 2); + return windowFolder.relativize(file.toPath()).toString(); }) - .sorted() - .toArray(); + .sorted().collect(Collectors.toSet()); + + for (Method method : DrillWindowQueryTest.class.getDeclaredMethods()) { + DrillTest ann = method.getAnnotation(DrillTest.class); 
+ if (method.getAnnotation(Test.class) == null || ann == null) { + continue; + } + if (allCases.remove(ann.value() + ".q")) { + continue; + } + fail("found testcase referencing invalid file: " + method.getName()); Review Comment: Additionally, this is a comment on the structure of the message. But, can you follow the conventions here: https://github.com/apache/druid/blob/master/dev/style-conventions.md ########## sql/src/test/java/org/apache/druid/sql/calcite/DrillWindowQueryTest.java: ########## @@ -36,98 +37,221 @@ import org.apache.druid.data.input.impl.StringDimensionSchema; import org.apache.druid.jackson.DefaultObjectMapper; import org.apache.druid.java.util.common.Intervals; +import org.apache.druid.java.util.common.Numbers; import org.apache.druid.java.util.common.RE; -import org.apache.druid.java.util.common.logger.Logger; +import org.apache.druid.java.util.common.StringUtils; +import org.apache.druid.java.util.common.parsers.TimestampParser; +import org.apache.druid.query.QueryContexts; import org.apache.druid.query.QueryRunnerFactoryConglomerate; import org.apache.druid.segment.IndexBuilder; import org.apache.druid.segment.QueryableIndex; +import org.apache.druid.segment.column.ColumnType; +import org.apache.druid.segment.column.RowSignature; import org.apache.druid.segment.incremental.IncrementalIndexSchema; import org.apache.druid.segment.join.JoinableFactoryWrapper; import org.apache.druid.segment.writeout.OnHeapMemorySegmentWriteOutMediumFactory; +import org.apache.druid.sql.calcite.NegativeTest.Modes; +import org.apache.druid.sql.calcite.NegativeTest.NegativeTestProcessor; +import org.apache.druid.sql.calcite.planner.PlannerConfig; +import org.apache.druid.sql.calcite.planner.PlannerContext; import org.apache.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker; import org.apache.druid.timeline.DataSegment; import org.apache.druid.timeline.partition.NumberedShardSpec; +import org.joda.time.DateTime; +import org.joda.time.LocalTime; import 
org.junit.Assert; +import org.junit.Rule; import org.junit.Test; -import org.junit.runner.RunWith; -import org.junit.runners.Parameterized; +import org.junit.rules.TestRule; +import org.junit.runner.Description; +import org.junit.runners.model.Statement; import javax.annotation.Nonnull; +import javax.annotation.Nullable; + import java.io.File; import java.io.IOException; import java.io.InputStream; +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; +import java.lang.reflect.Method; import java.net.URL; import java.nio.charset.StandardCharsets; +import java.nio.file.Path; import java.util.ArrayList; import java.util.Arrays; +import java.util.Comparator; +import java.util.List; +import java.util.Locale; import java.util.Map; +import java.util.Set; +import java.util.function.Predicate; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; +import static org.junit.Assert.fail; /** - * These test cases are borrowed from the drill-test-framework at https://github.com/apache/drill-test-framework + * These test cases are borrowed from the drill-test-framework at + * https://github.com/apache/drill-test-framework * <p> - * The Drill data sources are just accessing parquet files directly, we ingest and index the data first via JSON - * (so that we avoid pulling in the parquet dependencies here) and then run the queries over that. + * The Drill data sources are just accessing parquet files directly, we ingest + * and index the data first via JSON (so that we avoid pulling in the parquet + * dependencies here) and then run the queries over that. * <p> - * The setup of the ingestion is done via code in this class, so any new data source needs to be added through that - * manner. 
That said, these tests are primarily being added to bootstrap our own test coverage of window - * functions, so it is believed that most iteration on tests will happen through the CalciteWindowQueryTest - * instead of this class. + * The setup of the ingestion is done via code in this class, so any new data + * source needs to be added through that manner. That said, these tests are + * primarily being added to bootstrap our own test coverage of window functions, + * so it is believed that most iteration on tests will happen through the + * CalciteWindowQueryTest instead of this class. */ -@RunWith(Parameterized.class) public class DrillWindowQueryTest extends BaseCalciteQueryTest { - private static final Logger log = new Logger(DrillWindowQueryTest.class); + private static final ObjectMapper MAPPER = new DefaultObjectMapper(); + private DrillTestCase testCase = null; static { NullHandling.initializeForTests(); } - @Parameterized.Parameters(name = "{0}") - public static Object parametersForWindowQueryTest() throws Exception + @Rule + public TestRule disableWhenNonSqlCompat = DisableUnless.SQL_COMPATIBLE; + + @Rule + public NegativeTestProcessor ignoreProcessor = new NegativeTestProcessor(); + + @Rule + public DrillTestCaseLoaderRule drillTestCaseRule = new DrillTestCaseLoaderRule(); + + @Test + public void ensureAllDeclared() throws Exception { final URL windowQueriesUrl = ClassLoader.getSystemResource("drill/window/queries/"); - File windowFolder = new File(windowQueriesUrl.toURI()); - int windowFolderPrefixSize = windowFolder.getAbsolutePath().length() + 1 /* 1 for the ending slash */; + Path windowFolder = new File(windowQueriesUrl.toURI()).toPath(); - return FileUtils - .streamFiles(windowFolder, true, "q") + Set<String> allCases = FileUtils + .streamFiles(windowFolder.toFile(), true, "q") .map(file -> { - final String filePath = file.getAbsolutePath(); - return filePath.substring(windowFolderPrefixSize, filePath.length() - 2); + return 
windowFolder.relativize(file.toPath()).toString(); }) - .sorted() - .toArray(); + .sorted().collect(Collectors.toSet()); + + for (Method method : DrillWindowQueryTest.class.getDeclaredMethods()) { + DrillTest ann = method.getAnnotation(DrillTest.class); + if (method.getAnnotation(Test.class) == null || ann == null) { + continue; + } + if (allCases.remove(ann.value() + ".q")) { + continue; + } + fail("found testcase referencing invalid file: " + method.getName()); + } + + for (String string : allCases) { + string = string.substring(0, string.lastIndexOf('.')); + System.out.printf(Locale.ENGLISH, "@%s( \"%s\" )\n" + + "@Test\n" + + "public void test_%s() {\n" + + " windowQueryTest();\n" + + "}\n", + DrillTest.class.getSimpleName(), + string, + string.replace('/', '_')); + } Review Comment: I think that this is to make it easy to dump the tests in. You can just copy and paste the output into the test class and you are done. -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. To unsubscribe, e-mail: [email protected] For queries about this service, please contact Infrastructure at: [email protected] --------------------------------------------------------------------- To unsubscribe, e-mail: [email protected] For additional commands, e-mail: [email protected]
