abhishekagarwal87 commented on code in PR #15096:
URL: https://github.com/apache/druid/pull/15096#discussion_r1348288640


##########
sql/src/test/java/org/apache/druid/sql/calcite/DrillWindowQueryTest.java:
##########
@@ -36,98 +37,221 @@
 import org.apache.druid.data.input.impl.StringDimensionSchema;
 import org.apache.druid.jackson.DefaultObjectMapper;
 import org.apache.druid.java.util.common.Intervals;
+import org.apache.druid.java.util.common.Numbers;
 import org.apache.druid.java.util.common.RE;
-import org.apache.druid.java.util.common.logger.Logger;
+import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.java.util.common.parsers.TimestampParser;
+import org.apache.druid.query.QueryContexts;
 import org.apache.druid.query.QueryRunnerFactoryConglomerate;
 import org.apache.druid.segment.IndexBuilder;
 import org.apache.druid.segment.QueryableIndex;
+import org.apache.druid.segment.column.ColumnType;
+import org.apache.druid.segment.column.RowSignature;
 import org.apache.druid.segment.incremental.IncrementalIndexSchema;
 import org.apache.druid.segment.join.JoinableFactoryWrapper;
 import 
org.apache.druid.segment.writeout.OnHeapMemorySegmentWriteOutMediumFactory;
+import org.apache.druid.sql.calcite.NegativeTest.Modes;
+import org.apache.druid.sql.calcite.NegativeTest.NegativeTestProcessor;
+import org.apache.druid.sql.calcite.planner.PlannerConfig;
+import org.apache.druid.sql.calcite.planner.PlannerContext;
 import org.apache.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker;
 import org.apache.druid.timeline.DataSegment;
 import org.apache.druid.timeline.partition.NumberedShardSpec;
+import org.joda.time.DateTime;
+import org.joda.time.LocalTime;
 import org.junit.Assert;
+import org.junit.Rule;
 import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
+import org.junit.rules.TestRule;
+import org.junit.runner.Description;
+import org.junit.runners.model.Statement;
 
 import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
+
 import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import java.lang.reflect.Method;
 import java.net.URL;
 import java.nio.charset.StandardCharsets;
+import java.nio.file.Path;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Comparator;
+import java.util.List;
+import java.util.Locale;
 import java.util.Map;
+import java.util.Set;
+import java.util.function.Predicate;
+import java.util.regex.Pattern;
+import java.util.stream.Collectors;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.fail;
 
 /**
- * These test cases are borrowed from the drill-test-framework at 
https://github.com/apache/drill-test-framework
+ * These test cases are borrowed from the drill-test-framework at
+ * https://github.com/apache/drill-test-framework
  * <p>
- * The Drill data sources are just accessing parquet files directly, we ingest 
and index the data first via JSON
- * (so that we avoid pulling in the parquet dependencies here) and then run 
the queries over that.
+ * The Drill data sources are just accessing parquet files directly, we ingest
+ * and index the data first via JSON (so that we avoid pulling in the parquet
+ * dependencies here) and then run the queries over that.
  * <p>
- * The setup of the ingestion is done via code in this class, so any new data 
source needs to be added through that
- * manner.  That said, these tests are primarily being added to bootstrap our 
own test coverage of window
- * functions, so it is believed that most iteration on tests will happen 
through the CalciteWindowQueryTest
- * instead of this class.
+ * The setup of the ingestion is done via code in this class, so any new data
+ * source needs to be added through that manner. That said, these tests are
+ * primarily being added to bootstrap our own test coverage of window 
functions,
+ * so it is believed that most iteration on tests will happen through the
+ * CalciteWindowQueryTest instead of this class.
  */
-@RunWith(Parameterized.class)
 public class DrillWindowQueryTest extends BaseCalciteQueryTest
 {
-  private static final Logger log = new Logger(DrillWindowQueryTest.class);
+  private static final ObjectMapper MAPPER = new DefaultObjectMapper();
+  private DrillTestCase testCase = null;
 
   static {
     NullHandling.initializeForTests();
   }
 
-  @Parameterized.Parameters(name = "{0}")
-  public static Object parametersForWindowQueryTest() throws Exception
+  @Rule
+  public TestRule disableWhenNonSqlCompat = DisableUnless.SQL_COMPATIBLE;
+
+  @Rule
+  public NegativeTestProcessor ignoreProcessor = new NegativeTestProcessor();
+
+  @Rule
+  public DrillTestCaseLoaderRule drillTestCaseRule = new 
DrillTestCaseLoaderRule();
+
+  @Test
+  public void ensureAllDeclared() throws Exception
   {
     final URL windowQueriesUrl = 
ClassLoader.getSystemResource("drill/window/queries/");
-    File windowFolder = new File(windowQueriesUrl.toURI());
-    int windowFolderPrefixSize = windowFolder.getAbsolutePath().length() + 1 
/* 1 for the ending slash */;
+    Path windowFolder = new File(windowQueriesUrl.toURI()).toPath();
 
-    return FileUtils
-        .streamFiles(windowFolder, true, "q")
+    Set<String> allCases = FileUtils
+        .streamFiles(windowFolder.toFile(), true, "q")
         .map(file -> {
-          final String filePath = file.getAbsolutePath();
-          return filePath.substring(windowFolderPrefixSize, filePath.length() 
- 2);
+          return windowFolder.relativize(file.toPath()).toString();
         })
-        .sorted()
-        .toArray();
+        .sorted().collect(Collectors.toSet());
+
+    for (Method method : DrillWindowQueryTest.class.getDeclaredMethods()) {
+      DrillTest ann = method.getAnnotation(DrillTest.class);
+      if (method.getAnnotation(Test.class) == null || ann == null) {
+        continue;
+      }
+      if (allCases.remove(ann.value() + ".q")) {
+        continue;
+      }
+      fail("found testcase referencing invalid file: " + method.getName());

Review Comment:
   could you add the value of the file too? 



##########
sql/src/test/java/org/apache/druid/sql/calcite/NegativeTest.java:
##########
@@ -36,23 +37,52 @@
 import static org.junit.Assert.assertThrows;
 
 /**
- * Can be used to mark tests which are not-yet supported in decoupled mode.
+ * Can be used to mark tests which are not-yet supported for some reason.
+ *
+ * In case a testcase marked with this annotation fails - it means that the
+ * testcase no longer fails with the annotated expectation.
+ *
+ * During usage; the annotation process have to be added to the testclass.
+ * Ensure that it's loaded as the most outer-rule by using order=0 - otherwise
+ * it may interfere with other rules:
+ * <code>
+ *   @Rule(order = 0)
+ *   public NegativeTestProcessor decoupledIgnoreProcessor = new 
NegativeTestProcessor();
+ *
+ *   @NegativeTest
+ *   @Test
+ *   public void testA() {
+ *   }
+ * </code>
  *
- * In case a testcase marked with this annotation fails - it may mean that the
- * testcase no longer needs that annotation.
  */
 @Retention(RetentionPolicy.RUNTIME)
 @Target({ElementType.METHOD})
-public @interface DecoupledIgnore
+public @interface NegativeTest

Review Comment:
   How about IgnoredTest? Negative in context of tests implies that the it 
tests a negative path like an exception being thrown. What do you think? 



##########
sql/src/test/java/org/apache/druid/sql/calcite/DrillWindowQueryTest.java:
##########
@@ -142,65 +266,220 @@ public SpecificSegmentsQuerySegmentWalker 
createQuerySegmentWalker(
         new LongDimensionSchema("col6"),
         new StringDimensionSchema("col7"),
         new StringDimensionSchema("col8"),
-        new StringDimensionSchema("col9")
-    );
+        new StringDimensionSchema("col9"));
+    attachIndex(
+        retVal,
+        "smlTbl.parquet",
+        // "col_int": 8122,
+        new LongDimensionSchema("col_int"),
+        // "col_bgint": 817200,
+        new LongDimensionSchema("col_bgint"),
+        // "col_char_2": "IN",
+        new StringDimensionSchema("col_char_2"),
+        // "col_vchar_52":
+        // "AXXXXXXXXXXXXXXXXXXXXXXXXXCXXXXXXXXXXXXXXXXXXXXXXXXB",
+        new StringDimensionSchema("col_vchar_52"),
+        // "col_tmstmp": 1409617682418,
+        new LongDimensionSchema("col_tmstmp"),
+        // "col_dt": 422717616000000,
+        new LongDimensionSchema("col_dt"),
+        // "col_booln": false,
+        new StringDimensionSchema("col_booln"),
+        // "col_dbl": 12900.48,
+        new DoubleDimensionSchema("col_dbl"),
+        // "col_tm": 33109170
+        new LongDimensionSchema("col_tm"));
+    attachIndex(
+        retVal,
+        "fewRowsAllData.parquet",
+        // "col0":12024,
+        new LongDimensionSchema("col0"),
+        // "col1":307168,
+        new LongDimensionSchema("col1"),
+        // "col2":"VT",
+        new StringDimensionSchema("col2"),
+        // "col3":"DXXXXXXXXXXXXXXXXXXXXXXXXXEXXXXXXXXXXXXXXXXXXXXXXXXF",
+        new StringDimensionSchema("col3"),
+        // "col4":1338596882419,
+        new LongDimensionSchema("col4"),
+        // "col5":422705433600000,
+        new LongDimensionSchema("col5"),
+        // "col6":true,
+        new StringDimensionSchema("col6"),
+        // "col7":3.95110006277E8,
+        new DoubleDimensionSchema("col7"),
+        // "col8":67465430
+        new LongDimensionSchema("col8"));
+    attachIndex(
+        retVal,
+        "t_alltype.parquet",
+        // "c1":1,
+        new LongDimensionSchema("c1"),
+        // "c2":592475043,
+        new LongDimensionSchema("c2"),
+        // "c3":616080519999272,
+        new LongDimensionSchema("c3"),
+        // "c4":"ObHeWTDEcbGzssDwPwurfs",
+        new StringDimensionSchema("c4"),
+        // "c5":"0sZxIfZ CGwTOaLWZ6nWkUNx",
+        new StringDimensionSchema("c5"),
+        // "c6":1456290852307,
+        new LongDimensionSchema("c6"),
+        // "c7":421426627200000,
+        new LongDimensionSchema("c7"),
+        // "c8":true,
+        new StringDimensionSchema("c8"),
+        // "c9":0.626179100469
+        new DoubleDimensionSchema("c9"));
 
     return retVal;
   }
 
-  @Test
+  public class TextualResultsVerifier implements ResultsVerifier
+  {
+    protected final List<String[]> expectedResultsText;
+    @Nullable
+    protected final RowSignature expectedResultRowSignature;
+    private RowSignature currentRowSignature;
+
+    public TextualResultsVerifier(List<String[]> expectedResultsString, 
RowSignature expectedSignature)
+    {
+      this.expectedResultsText = expectedResultsString;
+      this.expectedResultRowSignature = expectedSignature;
+    }
+
+    @Override
+    public void verifyRowSignature(RowSignature rowSignature)
+    {
+      if (expectedResultRowSignature != null) {
+        Assert.assertEquals(expectedResultRowSignature, rowSignature);
+      }
+      currentRowSignature = rowSignature;
+    }
+
+    @Override
+    public void verify(String sql, List<Object[]> results)
+    {
+      List<Object[]> expectedResults = parseResults(currentRowSignature, 
expectedResultsText);
+      try {
+        Assert.assertEquals(StringUtils.format("result count: %s", sql), 
expectedResultsText.size(), results.size());
+        if (!isOrdered(sql)) {
+          results.sort(new ArrayRowCmp());
+          expectedResults.sort(new ArrayRowCmp());
+        } else {
+          assertResultsEquals(sql, expectedResults, results);
+        }
+      }
+      catch (AssertionError e) {
+        displayResults(expectedResults);
+        System.out.println("query: " + sql);
+        displayResults(results);
+        throw e;
+      }
+    }
+
+    private boolean isOrdered(String sql)
+    {
+      // FIXME: SqlToRelConverter.isOrdered(null) would be better
+      sql = sql.toLowerCase(Locale.ENGLISH).replace('\n', ' ');
+      sql = sql.substring(sql.lastIndexOf(')'));
+      return sql.contains("order");
+    }
+  }
+
+  static class ArrayRowCmp implements Comparator<Object[]>
+  {
+    @Override
+    public int compare(Object[] arg0, Object[] arg1)
+    {
+      String s0 = Arrays.toString(arg0);
+      String s1 = Arrays.toString(arg1);
+      return s0.compareTo(s1);
+    }
+  }
+
+  private static List<Object[]> parseResults(RowSignature rs, List<String[]> 
results)
+  {
+    Predicate<String> longPattern = 
Pattern.compile("^-?[0-9]+$").asPredicate();
+    Predicate<String> doublePattern = 
Pattern.compile("^-?[0-9]+\\.[0-9]+$").asPredicate();
+    List<Object[]> ret = new ArrayList<>();
+    for (String[] row : results) {
+      Object[] newRow = new Object[row.length];
+      List<String> cc = rs.getColumnNames();
+      for (int i = 0; i < cc.size(); i++) {
+        ColumnType type = rs.getColumnType(i).get();
+        assertNull(type.getComplexTypeName());
+        final String val = row[i];
+        Object newVal;
+        if ("null".equals(val)) {
+          newVal = null;
+        } else {
+          switch (type.getType()) {
+            case STRING:
+              newVal = val;
+              break;
+            case LONG:
+              if ("".equals(val)) {
+                newVal = null;
+              } else if (longPattern.test(val)) {
+                newVal = Numbers.parseLong(val);
+              } else if ("0E-20".equals(val)) {
+                newVal = 0L;
+              } else if (doublePattern.test(val)) {
+                double d = Doubles.tryParse(val);
+                newVal = (long) d;
+              } else {
+                Function<String, DateTime> parser = 
TimestampParser.createTimestampParser("auto");
+                try {
+                  newVal = parser.apply(val);
+                }
+                catch (IllegalArgumentException iae) {
+                  LocalTime v = LocalTime.parse(val);
+                  newVal = v.getMillisOfDay();
+                }
+              }
+              break;

Review Comment:
   How about a flow such as below and avoid pattern matching? (0E-20 can be 
parsed using Doubles.tryParse)
   if empty; value = null
   try Numbers.parseLong()
     catch NFE
        try Doubles.tryParse
           catch NFE
              try timestampParsing
              



##########
sql/src/test/java/org/apache/druid/sql/calcite/DrillWindowQueryTest.java:
##########
@@ -36,98 +37,221 @@
 import org.apache.druid.data.input.impl.StringDimensionSchema;
 import org.apache.druid.jackson.DefaultObjectMapper;
 import org.apache.druid.java.util.common.Intervals;
+import org.apache.druid.java.util.common.Numbers;
 import org.apache.druid.java.util.common.RE;
-import org.apache.druid.java.util.common.logger.Logger;
+import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.java.util.common.parsers.TimestampParser;
+import org.apache.druid.query.QueryContexts;
 import org.apache.druid.query.QueryRunnerFactoryConglomerate;
 import org.apache.druid.segment.IndexBuilder;
 import org.apache.druid.segment.QueryableIndex;
+import org.apache.druid.segment.column.ColumnType;
+import org.apache.druid.segment.column.RowSignature;
 import org.apache.druid.segment.incremental.IncrementalIndexSchema;
 import org.apache.druid.segment.join.JoinableFactoryWrapper;
 import 
org.apache.druid.segment.writeout.OnHeapMemorySegmentWriteOutMediumFactory;
+import org.apache.druid.sql.calcite.NegativeTest.Modes;
+import org.apache.druid.sql.calcite.NegativeTest.NegativeTestProcessor;
+import org.apache.druid.sql.calcite.planner.PlannerConfig;
+import org.apache.druid.sql.calcite.planner.PlannerContext;
 import org.apache.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker;
 import org.apache.druid.timeline.DataSegment;
 import org.apache.druid.timeline.partition.NumberedShardSpec;
+import org.joda.time.DateTime;
+import org.joda.time.LocalTime;
 import org.junit.Assert;
+import org.junit.Rule;
 import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
+import org.junit.rules.TestRule;
+import org.junit.runner.Description;
+import org.junit.runners.model.Statement;
 
 import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
+
 import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import java.lang.reflect.Method;
 import java.net.URL;
 import java.nio.charset.StandardCharsets;
+import java.nio.file.Path;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Comparator;
+import java.util.List;
+import java.util.Locale;
 import java.util.Map;
+import java.util.Set;
+import java.util.function.Predicate;
+import java.util.regex.Pattern;
+import java.util.stream.Collectors;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.fail;
 
 /**
- * These test cases are borrowed from the drill-test-framework at 
https://github.com/apache/drill-test-framework
+ * These test cases are borrowed from the drill-test-framework at
+ * https://github.com/apache/drill-test-framework
  * <p>
- * The Drill data sources are just accessing parquet files directly, we ingest 
and index the data first via JSON
- * (so that we avoid pulling in the parquet dependencies here) and then run 
the queries over that.
+ * The Drill data sources are just accessing parquet files directly, we ingest
+ * and index the data first via JSON (so that we avoid pulling in the parquet
+ * dependencies here) and then run the queries over that.
  * <p>
- * The setup of the ingestion is done via code in this class, so any new data 
source needs to be added through that
- * manner.  That said, these tests are primarily being added to bootstrap our 
own test coverage of window
- * functions, so it is believed that most iteration on tests will happen 
through the CalciteWindowQueryTest
- * instead of this class.
+ * The setup of the ingestion is done via code in this class, so any new data
+ * source needs to be added through that manner. That said, these tests are
+ * primarily being added to bootstrap our own test coverage of window 
functions,
+ * so it is believed that most iteration on tests will happen through the
+ * CalciteWindowQueryTest instead of this class.
  */
-@RunWith(Parameterized.class)
 public class DrillWindowQueryTest extends BaseCalciteQueryTest
 {
-  private static final Logger log = new Logger(DrillWindowQueryTest.class);
+  private static final ObjectMapper MAPPER = new DefaultObjectMapper();
+  private DrillTestCase testCase = null;
 
   static {
     NullHandling.initializeForTests();
   }
 
-  @Parameterized.Parameters(name = "{0}")
-  public static Object parametersForWindowQueryTest() throws Exception
+  @Rule
+  public TestRule disableWhenNonSqlCompat = DisableUnless.SQL_COMPATIBLE;
+
+  @Rule
+  public NegativeTestProcessor ignoreProcessor = new NegativeTestProcessor();
+
+  @Rule
+  public DrillTestCaseLoaderRule drillTestCaseRule = new 
DrillTestCaseLoaderRule();
+
+  @Test
+  public void ensureAllDeclared() throws Exception
   {
     final URL windowQueriesUrl = 
ClassLoader.getSystemResource("drill/window/queries/");
-    File windowFolder = new File(windowQueriesUrl.toURI());
-    int windowFolderPrefixSize = windowFolder.getAbsolutePath().length() + 1 
/* 1 for the ending slash */;
+    Path windowFolder = new File(windowQueriesUrl.toURI()).toPath();
 
-    return FileUtils
-        .streamFiles(windowFolder, true, "q")
+    Set<String> allCases = FileUtils
+        .streamFiles(windowFolder.toFile(), true, "q")
         .map(file -> {
-          final String filePath = file.getAbsolutePath();
-          return filePath.substring(windowFolderPrefixSize, filePath.length() 
- 2);
+          return windowFolder.relativize(file.toPath()).toString();
         })
-        .sorted()
-        .toArray();
+        .sorted().collect(Collectors.toSet());
+
+    for (Method method : DrillWindowQueryTest.class.getDeclaredMethods()) {
+      DrillTest ann = method.getAnnotation(DrillTest.class);
+      if (method.getAnnotation(Test.class) == null || ann == null) {
+        continue;
+      }
+      if (allCases.remove(ann.value() + ".q")) {
+        continue;
+      }
+      fail("found testcase referencing invalid file: " + method.getName());
+    }
+
+    for (String string : allCases) {
+      string = string.substring(0, string.lastIndexOf('.'));
+      System.out.printf(Locale.ENGLISH, "@%s( \"%s\" )\n"
+          + "@Test\n"
+          + "public void test_%s() {\n"
+          + "    windowQueryTest();\n"
+          + "}\n",
+          DrillTest.class.getSimpleName(),
+          string,
+          string.replace('/', '_'));
+    }

Review Comment:
   Is it needed, though? :) Just printing a list of files should be enough. 



##########
sql/src/test/java/org/apache/druid/sql/calcite/DrillWindowQueryTest.java:
##########
@@ -142,65 +266,220 @@ public SpecificSegmentsQuerySegmentWalker 
createQuerySegmentWalker(
         new LongDimensionSchema("col6"),
         new StringDimensionSchema("col7"),
         new StringDimensionSchema("col8"),
-        new StringDimensionSchema("col9")
-    );
+        new StringDimensionSchema("col9"));
+    attachIndex(
+        retVal,
+        "smlTbl.parquet",
+        // "col_int": 8122,
+        new LongDimensionSchema("col_int"),
+        // "col_bgint": 817200,
+        new LongDimensionSchema("col_bgint"),
+        // "col_char_2": "IN",
+        new StringDimensionSchema("col_char_2"),
+        // "col_vchar_52":
+        // "AXXXXXXXXXXXXXXXXXXXXXXXXXCXXXXXXXXXXXXXXXXXXXXXXXXB",
+        new StringDimensionSchema("col_vchar_52"),
+        // "col_tmstmp": 1409617682418,
+        new LongDimensionSchema("col_tmstmp"),
+        // "col_dt": 422717616000000,
+        new LongDimensionSchema("col_dt"),
+        // "col_booln": false,
+        new StringDimensionSchema("col_booln"),
+        // "col_dbl": 12900.48,
+        new DoubleDimensionSchema("col_dbl"),
+        // "col_tm": 33109170
+        new LongDimensionSchema("col_tm"));
+    attachIndex(
+        retVal,
+        "fewRowsAllData.parquet",
+        // "col0":12024,
+        new LongDimensionSchema("col0"),
+        // "col1":307168,
+        new LongDimensionSchema("col1"),
+        // "col2":"VT",
+        new StringDimensionSchema("col2"),
+        // "col3":"DXXXXXXXXXXXXXXXXXXXXXXXXXEXXXXXXXXXXXXXXXXXXXXXXXXF",
+        new StringDimensionSchema("col3"),
+        // "col4":1338596882419,
+        new LongDimensionSchema("col4"),
+        // "col5":422705433600000,
+        new LongDimensionSchema("col5"),
+        // "col6":true,
+        new StringDimensionSchema("col6"),
+        // "col7":3.95110006277E8,
+        new DoubleDimensionSchema("col7"),
+        // "col8":67465430
+        new LongDimensionSchema("col8"));
+    attachIndex(
+        retVal,
+        "t_alltype.parquet",
+        // "c1":1,
+        new LongDimensionSchema("c1"),
+        // "c2":592475043,
+        new LongDimensionSchema("c2"),
+        // "c3":616080519999272,
+        new LongDimensionSchema("c3"),
+        // "c4":"ObHeWTDEcbGzssDwPwurfs",
+        new StringDimensionSchema("c4"),
+        // "c5":"0sZxIfZ CGwTOaLWZ6nWkUNx",
+        new StringDimensionSchema("c5"),
+        // "c6":1456290852307,
+        new LongDimensionSchema("c6"),
+        // "c7":421426627200000,
+        new LongDimensionSchema("c7"),
+        // "c8":true,
+        new StringDimensionSchema("c8"),
+        // "c9":0.626179100469
+        new DoubleDimensionSchema("c9"));
 
     return retVal;
   }
 
-  @Test
+  public class TextualResultsVerifier implements ResultsVerifier
+  {
+    protected final List<String[]> expectedResultsText;
+    @Nullable
+    protected final RowSignature expectedResultRowSignature;
+    private RowSignature currentRowSignature;
+
+    public TextualResultsVerifier(List<String[]> expectedResultsString, 
RowSignature expectedSignature)
+    {
+      this.expectedResultsText = expectedResultsString;
+      this.expectedResultRowSignature = expectedSignature;
+    }
+
+    @Override
+    public void verifyRowSignature(RowSignature rowSignature)
+    {
+      if (expectedResultRowSignature != null) {
+        Assert.assertEquals(expectedResultRowSignature, rowSignature);
+      }
+      currentRowSignature = rowSignature;
+    }
+
+    @Override
+    public void verify(String sql, List<Object[]> results)
+    {
+      List<Object[]> expectedResults = parseResults(currentRowSignature, 
expectedResultsText);
+      try {
+        Assert.assertEquals(StringUtils.format("result count: %s", sql), 
expectedResultsText.size(), results.size());
+        if (!isOrdered(sql)) {
+          results.sort(new ArrayRowCmp());
+          expectedResults.sort(new ArrayRowCmp());
+        } else {
+          assertResultsEquals(sql, expectedResults, results);
+        }
+      }
+      catch (AssertionError e) {
+        displayResults(expectedResults);
+        System.out.println("query: " + sql);
+        displayResults(results);
+        throw e;
+      }
+    }
+
+    private boolean isOrdered(String sql)
+    {
+      // FIXME: SqlToRelConverter.isOrdered(null) would be better
+      sql = sql.toLowerCase(Locale.ENGLISH).replace('\n', ' ');
+      sql = sql.substring(sql.lastIndexOf(')'));
+      return sql.contains("order");
+    }

Review Comment:
   Would it be too much work to set this manually on a test-by-test basis? 



##########
sql/src/test/java/org/apache/druid/sql/calcite/DrillWindowQueryTest.java:
##########
@@ -142,65 +266,220 @@ public SpecificSegmentsQuerySegmentWalker 
createQuerySegmentWalker(
         new LongDimensionSchema("col6"),
         new StringDimensionSchema("col7"),
         new StringDimensionSchema("col8"),
-        new StringDimensionSchema("col9")
-    );
+        new StringDimensionSchema("col9"));
+    attachIndex(
+        retVal,
+        "smlTbl.parquet",
+        // "col_int": 8122,
+        new LongDimensionSchema("col_int"),
+        // "col_bgint": 817200,
+        new LongDimensionSchema("col_bgint"),
+        // "col_char_2": "IN",
+        new StringDimensionSchema("col_char_2"),
+        // "col_vchar_52":
+        // "AXXXXXXXXXXXXXXXXXXXXXXXXXCXXXXXXXXXXXXXXXXXXXXXXXXB",
+        new StringDimensionSchema("col_vchar_52"),
+        // "col_tmstmp": 1409617682418,
+        new LongDimensionSchema("col_tmstmp"),
+        // "col_dt": 422717616000000,
+        new LongDimensionSchema("col_dt"),
+        // "col_booln": false,
+        new StringDimensionSchema("col_booln"),
+        // "col_dbl": 12900.48,
+        new DoubleDimensionSchema("col_dbl"),
+        // "col_tm": 33109170
+        new LongDimensionSchema("col_tm"));
+    attachIndex(
+        retVal,
+        "fewRowsAllData.parquet",
+        // "col0":12024,
+        new LongDimensionSchema("col0"),
+        // "col1":307168,
+        new LongDimensionSchema("col1"),
+        // "col2":"VT",
+        new StringDimensionSchema("col2"),
+        // "col3":"DXXXXXXXXXXXXXXXXXXXXXXXXXEXXXXXXXXXXXXXXXXXXXXXXXXF",
+        new StringDimensionSchema("col3"),
+        // "col4":1338596882419,
+        new LongDimensionSchema("col4"),
+        // "col5":422705433600000,
+        new LongDimensionSchema("col5"),
+        // "col6":true,
+        new StringDimensionSchema("col6"),
+        // "col7":3.95110006277E8,
+        new DoubleDimensionSchema("col7"),
+        // "col8":67465430
+        new LongDimensionSchema("col8"));
+    attachIndex(
+        retVal,
+        "t_alltype.parquet",
+        // "c1":1,
+        new LongDimensionSchema("c1"),
+        // "c2":592475043,
+        new LongDimensionSchema("c2"),
+        // "c3":616080519999272,
+        new LongDimensionSchema("c3"),
+        // "c4":"ObHeWTDEcbGzssDwPwurfs",
+        new StringDimensionSchema("c4"),
+        // "c5":"0sZxIfZ CGwTOaLWZ6nWkUNx",
+        new StringDimensionSchema("c5"),
+        // "c6":1456290852307,
+        new LongDimensionSchema("c6"),
+        // "c7":421426627200000,
+        new LongDimensionSchema("c7"),
+        // "c8":true,
+        new StringDimensionSchema("c8"),
+        // "c9":0.626179100469
+        new DoubleDimensionSchema("c9"));
 
     return retVal;
   }
 
-  @Test
+  public class TextualResultsVerifier implements ResultsVerifier
+  {
+    protected final List<String[]> expectedResultsText;
+    @Nullable
+    protected final RowSignature expectedResultRowSignature;
+    private RowSignature currentRowSignature;
+
+    public TextualResultsVerifier(List<String[]> expectedResultsString, 
RowSignature expectedSignature)
+    {
+      this.expectedResultsText = expectedResultsString;
+      this.expectedResultRowSignature = expectedSignature;
+    }
+
+    @Override
+    public void verifyRowSignature(RowSignature rowSignature)
+    {
+      if (expectedResultRowSignature != null) {
+        Assert.assertEquals(expectedResultRowSignature, rowSignature);
+      }
+      currentRowSignature = rowSignature;
+    }
+
+    @Override
+    public void verify(String sql, List<Object[]> results)
+    {
+      List<Object[]> expectedResults = parseResults(currentRowSignature, 
expectedResultsText);
+      try {
+        Assert.assertEquals(StringUtils.format("result count: %s", sql), 
expectedResultsText.size(), results.size());
+        if (!isOrdered(sql)) {
+          results.sort(new ArrayRowCmp());
+          expectedResults.sort(new ArrayRowCmp());
+        } else {
+          assertResultsEquals(sql, expectedResults, results);
+        }
+      }
+      catch (AssertionError e) {
+        displayResults(expectedResults);
+        System.out.println("query: " + sql);
+        displayResults(results);

Review Comment:
   ```suggestion
           System.out.println("query: " + sql);
           displayResults(expectedResults);
           displayResults(results);
   ```



##########
sql/src/test/java/org/apache/druid/sql/calcite/DrillWindowQueryTest.java:
##########
@@ -36,98 +37,221 @@
 import org.apache.druid.data.input.impl.StringDimensionSchema;
 import org.apache.druid.jackson.DefaultObjectMapper;
 import org.apache.druid.java.util.common.Intervals;
+import org.apache.druid.java.util.common.Numbers;
 import org.apache.druid.java.util.common.RE;
-import org.apache.druid.java.util.common.logger.Logger;
+import org.apache.druid.java.util.common.StringUtils;
+import org.apache.druid.java.util.common.parsers.TimestampParser;
+import org.apache.druid.query.QueryContexts;
 import org.apache.druid.query.QueryRunnerFactoryConglomerate;
 import org.apache.druid.segment.IndexBuilder;
 import org.apache.druid.segment.QueryableIndex;
+import org.apache.druid.segment.column.ColumnType;
+import org.apache.druid.segment.column.RowSignature;
 import org.apache.druid.segment.incremental.IncrementalIndexSchema;
 import org.apache.druid.segment.join.JoinableFactoryWrapper;
 import 
org.apache.druid.segment.writeout.OnHeapMemorySegmentWriteOutMediumFactory;
+import org.apache.druid.sql.calcite.NegativeTest.Modes;
+import org.apache.druid.sql.calcite.NegativeTest.NegativeTestProcessor;
+import org.apache.druid.sql.calcite.planner.PlannerConfig;
+import org.apache.druid.sql.calcite.planner.PlannerContext;
 import org.apache.druid.sql.calcite.util.SpecificSegmentsQuerySegmentWalker;
 import org.apache.druid.timeline.DataSegment;
 import org.apache.druid.timeline.partition.NumberedShardSpec;
+import org.joda.time.DateTime;
+import org.joda.time.LocalTime;
 import org.junit.Assert;
+import org.junit.Rule;
 import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
+import org.junit.rules.TestRule;
+import org.junit.runner.Description;
+import org.junit.runners.model.Statement;
 
 import javax.annotation.Nonnull;
+import javax.annotation.Nullable;
+
 import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
+import java.lang.annotation.ElementType;
+import java.lang.annotation.Retention;
+import java.lang.annotation.RetentionPolicy;
+import java.lang.annotation.Target;
+import java.lang.reflect.Method;
 import java.net.URL;
 import java.nio.charset.StandardCharsets;
+import java.nio.file.Path;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Comparator;
+import java.util.List;
+import java.util.Locale;
 import java.util.Map;
+import java.util.Set;
+import java.util.function.Predicate;
+import java.util.regex.Pattern;
+import java.util.stream.Collectors;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.fail;
 
 /**
- * These test cases are borrowed from the drill-test-framework at 
https://github.com/apache/drill-test-framework
+ * These test cases are borrowed from the drill-test-framework at
+ * https://github.com/apache/drill-test-framework
  * <p>
- * The Drill data sources are just accessing parquet files directly, we ingest 
and index the data first via JSON
- * (so that we avoid pulling in the parquet dependencies here) and then run 
the queries over that.
+ * The Drill data sources are just accessing parquet files directly, we ingest
+ * and index the data first via JSON (so that we avoid pulling in the parquet
+ * dependencies here) and then run the queries over that.
  * <p>
- * The setup of the ingestion is done via code in this class, so any new data 
source needs to be added through that
- * manner.  That said, these tests are primarily being added to bootstrap our 
own test coverage of window
- * functions, so it is believed that most iteration on tests will happen 
through the CalciteWindowQueryTest
- * instead of this class.
+ * The setup of the ingestion is done via code in this class, so any new data
+ * source needs to be added through that manner. That said, these tests are
+ * primarily being added to bootstrap our own test coverage of window 
functions,
+ * so it is believed that most iteration on tests will happen through the
+ * CalciteWindowQueryTest instead of this class.
  */
-@RunWith(Parameterized.class)
 public class DrillWindowQueryTest extends BaseCalciteQueryTest
 {
-  private static final Logger log = new Logger(DrillWindowQueryTest.class);
+  private static final ObjectMapper MAPPER = new DefaultObjectMapper();
+  private DrillTestCase testCase = null;
 
   static {
     NullHandling.initializeForTests();
   }
 
-  @Parameterized.Parameters(name = "{0}")
-  public static Object parametersForWindowQueryTest() throws Exception
+  @Rule
+  public TestRule disableWhenNonSqlCompat = DisableUnless.SQL_COMPATIBLE;
+
+  @Rule
+  public NegativeTestProcessor ignoreProcessor = new NegativeTestProcessor();
+
+  @Rule
+  public DrillTestCaseLoaderRule drillTestCaseRule = new 
DrillTestCaseLoaderRule();
+
+  @Test
+  public void ensureAllDeclared() throws Exception
   {
     final URL windowQueriesUrl = 
ClassLoader.getSystemResource("drill/window/queries/");
-    File windowFolder = new File(windowQueriesUrl.toURI());
-    int windowFolderPrefixSize = windowFolder.getAbsolutePath().length() + 1 
/* 1 for the ending slash */;
+    Path windowFolder = new File(windowQueriesUrl.toURI()).toPath();
 
-    return FileUtils
-        .streamFiles(windowFolder, true, "q")
+    Set<String> allCases = FileUtils
+        .streamFiles(windowFolder.toFile(), true, "q")
         .map(file -> {
-          final String filePath = file.getAbsolutePath();
-          return filePath.substring(windowFolderPrefixSize, filePath.length() 
- 2);
+          return windowFolder.relativize(file.toPath()).toString();
         })
-        .sorted()
-        .toArray();
+        .sorted().collect(Collectors.toSet());
+
+    for (Method method : DrillWindowQueryTest.class.getDeclaredMethods()) {
+      DrillTest ann = method.getAnnotation(DrillTest.class);
+      if (method.getAnnotation(Test.class) == null || ann == null) {
+        continue;
+      }
+      if (allCases.remove(ann.value() + ".q")) {
+        continue;
+      }
+      fail("found testcase referencing invalid file: " + method.getName());
+    }
+
+    for (String string : allCases) {
+      string = string.substring(0, string.lastIndexOf('.'));
+      System.out.printf(Locale.ENGLISH, "@%s( \"%s\" )\n"
+          + "@Test\n"
+          + "public void test_%s() {\n"
+          + "    windowQueryTest();\n"
+          + "}\n",
+          DrillTest.class.getSimpleName(),
+          string,
+          string.replace('/', '_'));
+    }
+    assertEquals("found some non-declared tests; please add the above!", 0, 
allCases.size());
   }
 
-  private static final ObjectMapper MAPPER = new DefaultObjectMapper();
-  private final String filename;
+  @Retention(RetentionPolicy.RUNTIME)
+  @Target({ElementType.METHOD})
+  public @interface DrillTest
+  {
+    /**
+     * Name of the file this test should execute.
+     */
+    String value();
+  }
 
-  public DrillWindowQueryTest(
-      String filename
-  )
+  class DrillTestCaseLoaderRule implements TestRule
   {
-    this.filename = filename;
+
+    @Override
+    public Statement apply(Statement base, Description description)
+    {
+      DrillTest annotation = description.getAnnotation(DrillTest.class);
+      testCase = (annotation == null) ? null : new 
DrillTestCase(annotation.value());
+      return base;
+    }
+  }
+
+  static class DrillTestCase
+  {
+    private final String query;
+    private final List<String[]> results;
+    private String filename;
+
+    public DrillTestCase(String filename)
+    {
+      try {
+        this.filename = filename;
+        this.query = readStringFromResource(".q");
+        String resultsStr = readStringFromResource(".e");
+        String[] lines = resultsStr.split("\n");
+        results = new ArrayList<>();
+        if (resultsStr.length() > 0) {
+          for (String string : lines) {
+            String[] cols = string.split("\t");
+            results.add(cols);
+          }
+        }
+      }
+      catch (Exception e) {
+        throw new RuntimeException("Encountered exception while loading 
testcase", e);

Review Comment:
   Can you add the filename to this error message? When a test case fails to load, the wrapped exception alone doesn't identify which `.q`/`.e` resource was being read, which makes the failure hard to diagnose.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]


Reply via email to