This is an automated email from the ASF dual-hosted git repository.
rongr pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/pinot.git
The following commit(s) were added to refs/heads/master by this push:
new 7b98e30441 [multistage] expand test framework features (#9794)
7b98e30441 is described below
commit 7b98e304415c1a9df880b65e4214e6e202d92d86
Author: Rong Rong <[email protected]>
AuthorDate: Mon Nov 14 14:40:25 2022 -0800
[multistage] expand test framework features (#9794)
* allow multiple SQLs to run against the same table input setup
* allow output checkers, so we don't need to load data into H2 if not
needed.
* also require a "requireH2" flag from extra properties in order to use H2
as the expected row generator.
Co-authored-by: Rong Rong <[email protected]>
---
.../pinot/query/runtime/QueryRunnerTestBase.java | 53 ++++++++-----
.../runtime/queries/ResourceBasedQueriesTest.java | 91 +++++++++++++++-------
.../src/test/resources/queries/BasicQuery.json | 86 ++++++++++++--------
.../src/test/resources/queries/SpecialSyntax.json | 45 +++++++++++
4 files changed, 197 insertions(+), 78 deletions(-)
diff --git
a/pinot-query-runtime/src/test/java/org/apache/pinot/query/runtime/QueryRunnerTestBase.java
b/pinot-query-runtime/src/test/java/org/apache/pinot/query/runtime/QueryRunnerTestBase.java
index 8a47075a80..86d6f01b43 100644
---
a/pinot-query-runtime/src/test/java/org/apache/pinot/query/runtime/QueryRunnerTestBase.java
+++
b/pinot-query-runtime/src/test/java/org/apache/pinot/query/runtime/QueryRunnerTestBase.java
@@ -28,6 +28,7 @@ import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
+import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
@@ -157,9 +158,9 @@ public abstract class QueryRunnerTestBase extends
QueryTestSet {
// --------------------------------------------------------------------------
// TEST CASES PREP
// --------------------------------------------------------------------------
- protected Schema constructSchema(String schemaName, List<ColumnAndType>
columnAndTypes) {
+ protected Schema constructSchema(String schemaName,
List<QueryTestCase.ColumnAndType> columnAndTypes) {
Schema.SchemaBuilder builder = new Schema.SchemaBuilder();
- for (ColumnAndType columnAndType : columnAndTypes) {
+ for (QueryTestCase.ColumnAndType columnAndType : columnAndTypes) {
builder.addSingleValueDimension(columnAndType._name,
FieldSpec.DataType.valueOf(columnAndType._type));
}
// TODO: ts is built-in, but we should allow user overwrite
@@ -168,13 +169,13 @@ public abstract class QueryRunnerTestBase extends
QueryTestSet {
return builder.build();
}
- protected List<GenericRow> toRow(List<ColumnAndType> columnAndTypes,
List<List<Object>> value) {
+ protected List<GenericRow> toRow(List<QueryTestCase.ColumnAndType>
columnAndTypes, List<List<Object>> value) {
List<GenericRow> result = new ArrayList<>(value.size());
for (int rowId = 0; rowId < value.size(); rowId++) {
GenericRow row = new GenericRow();
List<Object> rawRow = value.get(rowId);
int colId = 0;
- for (ColumnAndType columnAndType : columnAndTypes) {
+ for (QueryTestCase.ColumnAndType columnAndType : columnAndTypes) {
row.putValue(columnAndType._name, rawRow.get(colId++));
}
// TODO: ts is built-in, but we should allow user overwrite
@@ -256,22 +257,38 @@ public abstract class QueryRunnerTestBase extends
QueryTestSet {
}
public static class QueryTestCase {
- @JsonProperty("sql")
- public String _sql;
- @JsonProperty("description")
- public String _description;
+ public static final String REQUIRED_H2_KEY = "requireH2";
+ public static final String BLOCK_SIZE_KEY = "blockSize";
+ public static final String SERVER_ASSIGN_STRATEGY_KEY =
"serverSelectionStrategy";
+
@JsonProperty("tables")
- public Map<String, List<ColumnAndType>> _tables;
- @JsonProperty("inputs")
- public Map<String, List<List<Object>>> _inputs;
+ public Map<String, Table> _tables;
+ @JsonProperty("queries")
+ public List<Query> _queries;
@JsonProperty("extraProps")
- public Map<String, Object> _extraProps;
- }
+ public Map<String, Object> _extraProps = Collections.emptyMap();
+
+ public static class Table {
+ @JsonProperty("schema")
+ public List<ColumnAndType> _schema;
+ @JsonProperty("inputs")
+ public List<List<Object>> _inputs;
+ }
- public static class ColumnAndType {
- @JsonProperty("name")
- String _name;
- @JsonProperty("type")
- String _type;
+ public static class Query {
+ @JsonProperty("sql")
+ public String _sql;
+ @JsonProperty("description")
+ public String _description;
+ @JsonProperty("outputs")
+ public List<List<Object>> _outputs = Collections.emptyList();
+ }
+
+ public static class ColumnAndType {
+ @JsonProperty("name")
+ String _name;
+ @JsonProperty("type")
+ String _type;
+ }
}
}
diff --git
a/pinot-query-runtime/src/test/java/org/apache/pinot/query/runtime/queries/ResourceBasedQueriesTest.java
b/pinot-query-runtime/src/test/java/org/apache/pinot/query/runtime/queries/ResourceBasedQueriesTest.java
index aba688f058..b0f8adb4b8 100644
---
a/pinot-query-runtime/src/test/java/org/apache/pinot/query/runtime/queries/ResourceBasedQueriesTest.java
+++
b/pinot-query-runtime/src/test/java/org/apache/pinot/query/runtime/queries/ResourceBasedQueriesTest.java
@@ -23,6 +23,7 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableList;
import java.io.File;
import java.net.URL;
+import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@@ -54,7 +55,8 @@ public class ResourceBasedQueriesTest extends
QueryRunnerTestBase {
private static final String QUERY_TEST_RESOURCE_FOLDER = "queries";
// TODO: refactor and load test dynamically using the reousrce utils in
pinot-tools
private static final List<String> QUERY_TEST_RESOURCE_FILES =
ImmutableList.of(
- "BasicQuery.json"
+ "BasicQuery.json",
+ "SpecialSyntax.json"
);
@BeforeClass
@@ -76,30 +78,31 @@ public class ResourceBasedQueriesTest extends
QueryRunnerTestBase {
QueryTestCase testCase = testCaseEntry.getValue();
// table will be registered on both servers.
Map<String, Schema> schemaMap = new HashMap<>();
- for (Map.Entry<String, List<ColumnAndType>> e :
testCase._tables.entrySet()) {
- String tableName = testCaseName + "_" + e.getKey();
+ for (Map.Entry<String, QueryTestCase.Table> tableEntry :
testCase._tables.entrySet()) {
+ String tableName = testCaseName + "_" + tableEntry.getKey();
// TODO: able to choose table type, now default to OFFLINE
String tableNameWithType =
TableNameBuilder.forType(TableType.OFFLINE).tableNameWithType(tableName);
- org.apache.pinot.spi.data.Schema pinotSchema =
constructSchema(tableName, e.getValue());
+ org.apache.pinot.spi.data.Schema pinotSchema =
constructSchema(tableName, tableEntry.getValue()._schema);
schemaMap.put(tableName, pinotSchema);
factory1.registerTable(pinotSchema, tableNameWithType);
factory2.registerTable(pinotSchema, tableNameWithType);
- }
- for (Map.Entry<String, List<List<Object>>> e :
testCase._inputs.entrySet()) {
- List<ColumnAndType> columnAndTypes = testCase._tables.get(e.getKey());
- String tableName = testCaseName + "_" + e.getKey();
- String tableNameWithType =
TableNameBuilder.forType(TableType.OFFLINE).tableNameWithType(tableName);
+ List<QueryTestCase.ColumnAndType> columnAndTypes =
tableEntry.getValue()._schema;
// TODO: able to select add rows to server1 or server2 (now default
server1)
// TODO: able to select add rows to existing segment or create new one
(now default create one segment)
- factory1.addSegment(tableNameWithType, toRow(columnAndTypes,
e.getValue()));
+ factory1.addSegment(tableNameWithType, toRow(columnAndTypes,
tableEntry.getValue()._inputs));
}
- // add all the tables to H2
- for (Map.Entry<String, Schema> e: schemaMap.entrySet()) {
- String tableName = e.getKey();
- Schema schema = e.getValue();
- addTableToH2(tableName, schema);
- addDataToH2(tableName, schema,
factory1.buildTableRowsMap().get(tableName));
- addDataToH2(tableName, schema,
factory2.buildTableRowsMap().get(tableName));
+
+ // Process extra properties
+ Map<String, Object> extraProps = testCaseEntry.getValue()._extraProps;
+ if ((boolean) extraProps.getOrDefault(QueryTestCase.REQUIRED_H2_KEY,
false)) {
+ // Add all test cases without explicit output to the tables on H2
+ for (Map.Entry<String, Schema> e : schemaMap.entrySet()) {
+ String tableName = e.getKey();
+ Schema schema = e.getValue();
+ addTableToH2(tableName, schema);
+ addDataToH2(tableName, schema,
factory1.buildTableRowsMap().get(tableName));
+ addDataToH2(tableName, schema,
factory2.buildTableRowsMap().get(tableName));
+ }
}
}
QueryServerEnclosure server1 = new QueryServerEnclosure(factory1);
@@ -136,8 +139,8 @@ public class ResourceBasedQueriesTest extends
QueryRunnerTestBase {
}
// TODO: name the test using testCaseName for testng reports
- @Test(dataProvider = "testResourceQueryTestCaseProvider")
- public void testQueryTestCases(String testCaseName, String sql)
+ @Test(dataProvider = "testResourceQueryTestCaseProviderInputOnly")
+ public void testQueryTestCasesWithH2(String testCaseName, String sql)
throws Exception {
// query pinot
List<Object[]> resultRows = queryRunner(sql);
@@ -146,19 +149,54 @@ public class ResourceBasedQueriesTest extends
QueryRunnerTestBase {
compareRowEquals(resultRows, expectedRows);
}
+ @Test(dataProvider = "testResourceQueryTestCaseProviderBoth")
+ public void testQueryTestCasesWithOutput(String testCaseName, String sql,
List<Object[]> expectedRows)
+ throws Exception {
+ List<Object[]> resultRows = queryRunner(sql);
+ compareRowEquals(resultRows, expectedRows);
+ }
+
+ @DataProvider
+ private static Object[][] testResourceQueryTestCaseProviderBoth()
+ throws Exception {
+ Map<String, QueryTestCase> testCaseMap = getTestCases();
+ List<Object[]> providerContent = new ArrayList<>();
+ for (Map.Entry<String, QueryTestCase> testCaseEntry :
testCaseMap.entrySet()) {
+ String testCaseName = testCaseEntry.getKey();
+ List<QueryTestCase.Query> queryCases = testCaseEntry.getValue()._queries;
+ for (QueryTestCase.Query queryCase : queryCases) {
+ if (queryCase._outputs != null && !queryCase._outputs.isEmpty()) {
+ String sql = replaceTableName(testCaseName, queryCase._sql);
+ List<List<Object>> orgRows = queryCase._outputs;
+ List<Object[]> expectedRows = new ArrayList<>(orgRows.size());
+ for (List<Object> objs : orgRows) {
+ expectedRows.add(objs.toArray());
+ }
+ Object[] testEntry = new Object[]{testCaseName, sql, expectedRows};
+ providerContent.add(testEntry);
+ }
+ }
+ }
+ return providerContent.toArray(new Object[][]{});
+ }
+
@DataProvider
- private static Object[][] testResourceQueryTestCaseProvider()
+ private static Object[][] testResourceQueryTestCaseProviderInputOnly()
throws Exception {
Map<String, QueryTestCase> testCaseMap = getTestCases();
- Object[][] providerContent = new Object[testCaseMap.size()][];
- int idx = 0;
+ List<Object[]> providerContent = new ArrayList<>();
for (Map.Entry<String, QueryTestCase> testCaseEntry :
testCaseMap.entrySet()) {
String testCaseName = testCaseEntry.getKey();
- String testSql = replaceTableName(testCaseName,
testCaseEntry.getValue()._sql);
- Object[] testEntry = new Object[]{testCaseName, testSql};
- providerContent[idx++] = testEntry;
+ List<QueryTestCase.Query> queryCases = testCaseEntry.getValue()._queries;
+ for (QueryTestCase.Query queryCase : queryCases) {
+ if (queryCase._outputs == null || queryCase._outputs.isEmpty()) {
+ String sql = replaceTableName(testCaseName, queryCase._sql);
+ Object[] testEntry = new Object[]{testCaseName, sql};
+ providerContent.add(testEntry);
+ }
+ }
}
- return providerContent;
+ return providerContent.toArray(new Object[][]{});
}
private static String replaceTableName(String testCaseName, String sql) {
@@ -166,6 +204,7 @@ public class ResourceBasedQueriesTest extends
QueryRunnerTestBase {
return matcher.replaceAll(testCaseName + "_$1");
}
+ // TODO: cache this test case generator
private static Map<String, QueryTestCase> getTestCases()
throws Exception {
Map<String, QueryTestCase> testCaseMap = new HashMap<>();
diff --git a/pinot-query-runtime/src/test/resources/queries/BasicQuery.json
b/pinot-query-runtime/src/test/resources/queries/BasicQuery.json
index be8b4cfea6..ba96d64297 100644
--- a/pinot-query-runtime/src/test/resources/queries/BasicQuery.json
+++ b/pinot-query-runtime/src/test/resources/queries/BasicQuery.json
@@ -1,49 +1,67 @@
{
"basic_test": {
- "description": "basic test case example",
- "sql": "SELECT * FROM {tbl}",
"tables": {
- "tbl": [
- {"name": "col1", "type": "STRING"},
- {"name": "col2", "type": "INT"}
- ]
+ "tbl" : {
+ "schema": [
+ {"name": "col1", "type": "STRING"},
+ {"name": "col2", "type": "INT"}
+ ],
+ "inputs": [
+ ["foo", 1],
+ ["bar", 2]
+ ]
+ }
},
- "inputs": {
- "tbl": [
- ["foo", 1],
- ["bar", 2]
- ]
+ "queries": [
+ {
+ "description": "basic test case example",
+ "sql": "SELECT * FROM {tbl}"
+ }
+ ],
+ "extraProps": {
+ "requireH2": true
}
},
"framework_test": {
- "description": "basic test demonstrate the configurable element in test
case framework",
- "sql": "SELECT {tbl1}.col1, {tbl1}.col2, {tbl2}.col3 FROM {tbl1} JOIN
{tbl2} ON {tbl1}.col1 = {tbl2}.col1",
"tables": {
- "tbl1": [
- {"name": "col1", "type": "STRING"},
- {"name": "col2", "type": "INT"}
- ],
- "tbl2": [
- {"name": "col1", "type": "STRING"},
- {"name": "col2", "type": "INT"},
- {"name": "col3", "type": "DOUBLE"}
- ]
+ "tbl1" : {
+ "schema": [
+ {"name": "col1", "type": "STRING"},
+ {"name": "col2", "type": "INT"}
+ ],
+ "inputs": [
+ ["foo", 1],
+ ["bar", 2]
+ ]
+ },
+ "tbl2" : {
+ "schema": [
+ {"name": "col1", "type": "STRING"},
+ {"name": "col2", "type": "INT"},
+ {"name": "col3", "type": "DOUBLE"}
+ ],
+ "inputs": [
+ ["foo", 1, 3.1416],
+ ["bar", 2, 2.7183]
+ ]
+ }
},
+ "queries": [
+ {
+ "description": "basic test demonstrate the configurable element in
test case framework",
+ "sql": "SELECT {tbl1}.col1, {tbl1}.col2, {tbl2}.col3 FROM {tbl1} JOIN
{tbl2} ON {tbl1}.col1 = {tbl2}.col1"
+ },
+ {
+ "description": "bla bla",
+ "sql": "SELECT {tbl1}.col1, {tbl1}.col2, COUNT(*) FROM {tbl1} JOIN
{tbl2} ON {tbl1}.col1 = {tbl2}.col1 GROUP BY {tbl1}.col1, {tbl1}.col2"
+ }
+ ],
"extraProps": {
- "comment": "TODO: these demonstrate how extra properties are used, but
not implemented",
+ "requireH2": true,
+ "comment": "TODO: these demonstrate how extra properties are used but
the following keys are not supported yet",
"blockSize": 2,
"serverSelectionStrategy": "RANDOM"
- },
- "inputs": {
- "tbl1": [
- ["foo", 1],
- ["bar", 2]
- ],
- "tbl2": [
- ["foo", 1, 3.1416],
- ["bar", 2, 2.2183]
- ]
}
}
-}
\ No newline at end of file
+}
diff --git a/pinot-query-runtime/src/test/resources/queries/SpecialSyntax.json
b/pinot-query-runtime/src/test/resources/queries/SpecialSyntax.json
new file mode 100644
index 0000000000..9f1020cd51
--- /dev/null
+++ b/pinot-query-runtime/src/test/resources/queries/SpecialSyntax.json
@@ -0,0 +1,45 @@
+{
+ "special_syntax": {
+ "tables": {
+ "tbl1" : {
+ "schema": [
+ {"name": "col1", "type": "STRING"},
+ {"name": "col2", "type": "STRING"},
+ {"name": "col3", "type": "INT"}
+ ],
+ "inputs": [
+ ["foo", "alice", 1],
+ ["bar", "bob", 2]
+ ]
+ },
+ "tbl2" : {
+ "schema": [
+ {"name": "col1", "type": "STRING"},
+ {"name": "col2", "type": "STRING"},
+ {"name": "col3", "type": "INT"}
+ ],
+ "inputs": [
+ ["foo", "bob", 3],
+ ["alice", "alice", 4]
+ ]
+ }
+ },
+ "queries": [
+ {
+ "description": "test join with using clause",
+ "sql": "SELECT {tbl1}.col1, {tbl2}.col3 FROM {tbl1} JOIN {tbl2} USING
(col1)",
+ "outputs": [
+ ["foo", 3]
+ ]
+ },
+ {
+ "description": "test join with using clause but join with columns not
being selected",
+ "sql": "SELECT {tbl1}.col1, {tbl2}.col3 FROM {tbl1} JOIN {tbl2} USING
(col2)",
+ "outputs": [
+ ["bar", 3],
+ ["foo", 4]
+ ]
+ }
+ ]
+ }
+}
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]