alpreu commented on a change in pull request #19224:
URL: https://github.com/apache/flink/pull/19224#discussion_r839542592
##########
File path:
flink-formats/flink-csv/src/test/java/org/apache/flink/formats/csv/TableCsvFormatITCase.java
##########
@@ -18,32 +18,78 @@
package org.apache.flink.formats.csv;
+import org.apache.flink.connector.file.table.FileSystemConnectorOptions;
+import org.apache.flink.connector.file.table.FileSystemTableFactory;
import org.apache.flink.formats.common.TimeFormats;
+import org.apache.flink.table.api.EnvironmentSettings;
+import org.apache.flink.table.api.Schema;
+import org.apache.flink.table.api.TableDescriptor;
+import org.apache.flink.table.api.TableEnvironment;
+import org.apache.flink.table.planner.factories.TestValuesTableFactory;
import org.apache.flink.table.planner.runtime.utils.TestData;
import org.apache.flink.table.planner.utils.JavaScalaConversionUtil;
-import org.apache.flink.table.planner.utils.JsonPlanTestBase;
+import org.apache.flink.test.util.AbstractTestBase;
+import org.apache.flink.types.Row;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Rule;
import org.junit.Test;
+import org.junit.rules.ExpectedException;
+
+import javax.annotation.Nullable;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
+import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
+import static org.apache.flink.table.api.DataTypes.BIGINT;
+import static org.apache.flink.table.api.DataTypes.INT;
+import static org.apache.flink.table.api.DataTypes.STRING;
+import static org.apache.flink.table.api.DataTypes.TIMESTAMP;
import static org.apache.flink.table.utils.DateTimeUtils.toLocalDateTime;
+import static org.apache.flink.util.Preconditions.checkNotNull;
+import static org.assertj.core.api.Assertions.assertThat;
/** Tests for the CSV file format. */
-public class TableCsvFormatITCase extends JsonPlanTestBase {
+public class TableCsvFormatITCase extends AbstractTestBase {
+
+ // NOTE(review): the ExpectedException rule is deprecated since JUnit 4.13 and is not
+ // used by any test visible in this diff -- consider assertThatThrownBy instead;
+ // verify against the full file before removing.
+ @Rule public ExpectedException exception = ExpectedException.none();
+
+ // Streaming-mode TableEnvironment, recreated for each test in setup().
+ private TableEnvironment tableEnv;
+
+ @Before
+ public void setup() throws Exception {
+ tableEnv =
TableEnvironment.create(EnvironmentSettings.inStreamingMode());
+ }
+
+ @After
+ public void after() {
+ TestValuesTableFactory.clearAllData();
+ }
// Verifies projection push-down through the CSV format: the source has three
// columns (a, b, c) while the sink selects only (a, c).
// NOTE(review): this hunk is truncated by a review comment below -- the
// remainder of the test body is not visible in this excerpt.
@Test
public void testProjectPushDown() throws Exception {
List<String> data = Arrays.asList("1,1,hi", "2,1,hello", "3,2,hello
world");
- createSourceTable("MyTable", data, "a bigint", "b int not null", "c
varchar");
- File sinkPath = createSinkTable("MySink", "a bigint", "c varchar");
+
+ Schema sourceSchema =
+ Schema.newBuilder()
+ .column("a", BIGINT())
+ .column("b", INT())
+ .column("c", STRING())
+ .build();
+
+ createSourceTable("MyTable", data, sourceSchema);
+
+ Schema sinkSchema = Schema.newBuilder().column("a",
BIGINT()).column("c", STRING()).build();
+
+ File sinkPath = createSinkTable("MySink", sinkSchema);
Review comment:
Can we just reuse sourceSchema here?
##########
File path:
flink-formats/flink-csv/src/test/java/org/apache/flink/formats/csv/TableCsvFormatITCase.java
##########
@@ -165,30 +269,87 @@ private static String formatSqlTimestamp(long timestamp) {
// Renders a long timestamp (presumably epoch millis -- confirm against
// DateTimeUtils.toLocalDateTime) in SQL TIMESTAMP text format for expected rows.
return
TimeFormats.SQL_TIMESTAMP_FORMAT.format(toLocalDateTime(timestamp));
}
- private void createSourceTable(String tableName, List<String> data,
String... fieldNameAndTypes)
+ private void createSourceTable(String tableName, List<String> data, Schema
schema)
throws IOException {
+
File sourceFile = TEMPORARY_FOLDER.newFile();
Collections.shuffle(data);
Files.write(sourceFile.toPath(), String.join("\n", data).getBytes());
- Map<String, String> properties = new HashMap<>();
- properties.put("connector", "filesystem");
- properties.put("path", sourceFile.getAbsolutePath());
- properties.put("format", "csv");
-
- createTestSourceTable(tableName, fieldNameAndTypes, null, properties);
+ tableEnv.createTemporaryTable(
+ tableName,
+ TableDescriptor.forConnector(FileSystemTableFactory.IDENTIFIER)
+ .option(FileSystemConnectorOptions.PATH,
sourceFile.getAbsolutePath())
+ .format(CsvCommons.IDENTIFIER)
+ .schema(schema)
+ .build());
}
- private File createSinkTable(String tableName, String...
fieldNameAndTypes) throws IOException {
+ private File createSinkTable(String tableName, Schema schema) throws
IOException {
File sinkPath = TEMPORARY_FOLDER.newFolder();
- Map<String, String> properties = new HashMap<>();
- properties.put("connector", "filesystem");
- properties.put("path", sinkPath.getAbsolutePath());
- properties.put("format", "csv");
- properties.put("csv.disable-quote-character", "true");
+ tableEnv.createTemporaryTable(
+ tableName,
+ TableDescriptor.forConnector(FileSystemTableFactory.IDENTIFIER)
+ .option(FileSystemConnectorOptions.PATH,
sinkPath.getAbsolutePath())
+ .option("csv.disable-quote-character", "true")
+ .format(CsvCommons.IDENTIFIER)
+ .schema(schema)
+ .build());
- createTestSinkTable(tableName, fieldNameAndTypes, null, properties);
return sinkPath;
}
+
+ private void createTestValuesSourceTable(
+ String tableName,
+ List<Row> data,
+ Schema schema,
+ Map<String, String> extraProperties,
+ @Nullable String... partitionFields) {
+
+ String dataId = TestValuesTableFactory.registerData(data);
+ Map<String, String> properties = new HashMap<>();
+ properties.put("data-id", dataId);
+ properties.put("bounded", "true");
+ properties.put("disable-lookup", "true");
+ properties.putAll(extraProperties);
+
+ TableDescriptor.Builder descriptor =
+ TableDescriptor.forConnector("values")
+ .schema(schema)
+ .partitionedBy(partitionFields);
+
+ for (Map.Entry<String, String> entry : properties.entrySet()) {
+ descriptor.option(entry.getKey(), entry.getValue());
+ }
+
+ tableEnv.createTemporaryTable(tableName, descriptor.build());
+ }
+
+ private void assertResult(List<String> expected, File resultFile) throws
IOException {
+ List<String> actual = readLines(resultFile);
+ assertResult(expected, actual);
+ }
+
+ private void assertResult(List<String> expected, List<String> actual) {
+ Collections.sort(expected);
+ Collections.sort(actual);
+ assertThat(actual).isEqualTo(expected);
+ }
 Review comment:
       Using AssertJ, this could be reduced to
 `assertThat(actual).containsExactlyInAnyOrderElementsOf(expected);` — note that
 `hasSameElementsAs` ignores duplicates, so it would weaken the check compared to
 the sort-and-compare version.
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]