slinkydeveloper commented on a change in pull request #17396:
URL: https://github.com/apache/flink/pull/17396#discussion_r720215123
##########
File path: flink-table/flink-table-planner/src/test/java/org/apache/flink/table/planner/functions/CastFunctionITCase.java
##########
@@ -18,176 +18,905 @@
package org.apache.flink.table.planner.functions;
-import org.apache.flink.table.annotation.DataTypeHint;
-import org.apache.flink.table.api.DataTypes;
+import org.apache.flink.table.api.TableEnvironment;
+import org.apache.flink.table.api.config.TableConfigOptions;
import org.apache.flink.table.functions.BuiltInFunctionDefinitions;
-import org.apache.flink.table.functions.ScalarFunction;
-import org.apache.flink.types.Row;
+import org.apache.flink.table.types.AbstractDataType;
+import org.apache.flink.table.types.DataType;
import org.junit.runners.Parameterized;
+import java.math.BigDecimal;
+import java.time.Duration;
+import java.time.Instant;
+import java.time.LocalDate;
+import java.time.LocalDateTime;
+import java.time.LocalTime;
+import java.time.Period;
+import java.time.ZoneId;
+import java.time.ZoneOffset;
+import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
+import static org.apache.flink.table.api.DataTypes.ARRAY;
+import static org.apache.flink.table.api.DataTypes.BIGINT;
+import static org.apache.flink.table.api.DataTypes.BINARY;
+import static org.apache.flink.table.api.DataTypes.BOOLEAN;
+import static org.apache.flink.table.api.DataTypes.BYTES;
+import static org.apache.flink.table.api.DataTypes.CHAR;
+import static org.apache.flink.table.api.DataTypes.DATE;
+import static org.apache.flink.table.api.DataTypes.DAY;
+import static org.apache.flink.table.api.DataTypes.DECIMAL;
+import static org.apache.flink.table.api.DataTypes.DOUBLE;
+import static org.apache.flink.table.api.DataTypes.FLOAT;
+import static org.apache.flink.table.api.DataTypes.INT;
+import static org.apache.flink.table.api.DataTypes.INTERVAL;
+import static org.apache.flink.table.api.DataTypes.MONTH;
+import static org.apache.flink.table.api.DataTypes.SMALLINT;
+import static org.apache.flink.table.api.DataTypes.STRING;
+import static org.apache.flink.table.api.DataTypes.TIME;
+import static org.apache.flink.table.api.DataTypes.TIMESTAMP;
+import static org.apache.flink.table.api.DataTypes.TIMESTAMP_LTZ;
+import static org.apache.flink.table.api.DataTypes.TINYINT;
+import static org.apache.flink.table.api.DataTypes.VARBINARY;
+import static org.apache.flink.table.api.DataTypes.VARCHAR;
+import static org.apache.flink.table.api.DataTypes.YEAR;
import static org.apache.flink.table.api.Expressions.$;
-import static org.apache.flink.table.api.Expressions.call;
-import static org.apache.flink.table.api.Expressions.row;
/** Tests for {@link BuiltInFunctionDefinitions#CAST}. */
public class CastFunctionITCase extends BuiltInFunctionTestBase {
+ private static final ZoneId TEST_TZ = ZoneId.of("Asia/Shanghai");
+
+ @Override
+ protected TableEnvironment env() {
+ TableEnvironment env = super.env();
+
+ env.getConfig().getConfiguration().set(TableConfigOptions.LOCAL_TIME_ZONE, TEST_TZ.getId());
+ return env;
+ }
+
@Parameterized.Parameters(name = "{index}: {0}")
public static List<TestSpec> testData() {
return Arrays.asList(
- TestSpec.forFunction(
- BuiltInFunctionDefinitions.CAST,
- "implicit with different field names")
- .onFieldsWithData(Row.of(12, "Hello"))
- .andDataTypes(DataTypes.of("ROW<otherNameInt INT,
otherNameString STRING>"))
- .withFunction(RowToFirstField.class)
- .testResult(
- call("RowToFirstField", $("f0")),
- "RowToFirstField(f0)",
- 12,
- DataTypes.INT()),
- TestSpec.forFunction(BuiltInFunctionDefinitions.CAST, "implicit with type widening")
- .onFieldsWithData(Row.of((byte) 12, "Hello"))
- .andDataTypes(DataTypes.of("ROW<i TINYINT, s STRING>"))
- .withFunction(RowToFirstField.class)
- .testResult(
- call("RowToFirstField", $("f0")),
- "RowToFirstField(f0)",
- 12,
- DataTypes.INT()),
- TestSpec.forFunction(
- BuiltInFunctionDefinitions.CAST,
- "implicit with nested type widening")
- .onFieldsWithData(Row.of(Row.of(12, 42), "Hello"))
- .andDataTypes(DataTypes.of("ROW<r ROW<i1 INT, i2 INT>,
s STRING>"))
- .withFunction(NestedRowToFirstField.class)
- .testResult(
- call("NestedRowToFirstField", $("f0")),
- "NestedRowToFirstField(f0)",
- Row.of(12, 42.0),
- DataTypes.of("ROW<i INT, d DOUBLE>")),
- TestSpec.forFunction(
- BuiltInFunctionDefinitions.CAST,
- "explicit with nested rows and implicit nullability change")
- .onFieldsWithData(Row.of(Row.of(12, 42, null), "Hello"))
- .andDataTypes(DataTypes.of("ROW<r ROW<i1 INT, i2 INT, i3 INT>, s STRING>"))
- .testResult(
- $("f0").cast(
- DataTypes.ROW(
- DataTypes.FIELD(
- "r",
- DataTypes.ROW(
- DataTypes.FIELD(
- "s",
- DataTypes.STRING()),
- DataTypes.FIELD(
- "b",
- DataTypes.BOOLEAN()),
- DataTypes.FIELD(
- "i",
- DataTypes.INT()))),
- DataTypes.FIELD("s", DataTypes.STRING()))),
- "CAST(f0 AS ROW<r ROW<s STRING NOT NULL, b BOOLEAN, i INT>, s STRING>)",
- Row.of(Row.of("12", true, null), "Hello"),
- // the inner NOT NULL is ignored in SQL because the outer ROW is
- // nullable and the cast does not allow setting the outer
- // nullability but derives it from the source operand
- DataTypes.of("ROW<r ROW<s STRING, b BOOLEAN, i INT>, s STRING>")),
- TestSpec.forFunction(
- BuiltInFunctionDefinitions.CAST,
- "explicit with nested rows and explicit nullability change")
- .onFieldsWithData(Row.of(Row.of(12, 42, null), "Hello"))
- .andDataTypes(DataTypes.of("ROW<r ROW<i1 INT, i2 INT, i3 INT>, s STRING>"))
- .testTableApiResult(
- $("f0").cast(
- DataTypes.ROW(
- DataTypes.FIELD(
- "r",
- DataTypes.ROW(
- DataTypes.FIELD(
- "s",
- DataTypes.STRING().notNull()),
- DataTypes.FIELD(
- "b",
- DataTypes.BOOLEAN()),
- DataTypes.FIELD(
- "i",
- DataTypes.INT()))),
- DataTypes.FIELD("s", DataTypes.STRING()))),
- Row.of(Row.of("12", true, null), "Hello"),
- DataTypes.of("ROW<r ROW<s STRING NOT NULL, b BOOLEAN, i INT>, s STRING>")),
- TestSpec.forFunction(
- BuiltInFunctionDefinitions.CAST,
- "implicit between structured type and row")
- .onFieldsWithData(12, "Ingo")
- .withFunction(StructuredTypeConstructor.class)
- .withFunction(RowToFirstField.class)
- .testResult(
- call(
- "RowToFirstField",
- call("StructuredTypeConstructor", row($("f0"), $("f1")))),
- "RowToFirstField(StructuredTypeConstructor((f0, f1)))",
- 12,
- DataTypes.INT()),
- TestSpec.forFunction(
- BuiltInFunctionDefinitions.CAST,
- "explicit between structured type and row")
- .onFieldsWithData(12, "Ingo")
- .withFunction(StructuredTypeConstructor.class)
- .testTableApiResult(
- call("StructuredTypeConstructor", row($("f0"),
$("f1")))
- .cast(
- DataTypes.ROW(
- DataTypes.BIGINT(),
DataTypes.STRING())),
- Row.of(12L, "Ingo"),
- DataTypes.ROW(DataTypes.BIGINT(),
DataTypes.STRING())));
+ CastTestSpecBuilder.test(CHAR(3), "To CHAR(3)")
+ .testCase(CHAR(3), "foo", "foo")
+ .testCase(CHAR(4), "foo", "foo ")
+ .testCase(CHAR(4), "foo ", "foo ")
+ .testCase(VARCHAR(3), "foo", "foo")
+ .testCase(VARCHAR(5), "foo", "foo")
+ .testCase(VARCHAR(5), "foo ", "foo ")
+ // https://issues.apache.org/jira/browse/FLINK-24413 - Trim to precision
+ // in this case down to 3 chars
+ .testCase(STRING(), "abcdef", "abcdef") // "abc"
+ .testCase(DATE(), LocalDate.parse("2021-30-09"),
"2021-30-09") // "202"
+ .build(),
+ CastTestSpecBuilder.test(VARCHAR(3), "To VARCHAR(3)")
+ .testCase(CHAR(3), "foo", "foo")
+ .testCase(CHAR(4), "foo", "foo ")
+ .testCase(CHAR(4), "foo ", "foo ")
+ .testCase(VARCHAR(3), "foo", "foo")
+ .testCase(VARCHAR(5), "foo", "foo")
+ .testCase(VARCHAR(5), "foo ", "foo ")
+ // https://issues.apache.org/jira/browse/FLINK-24413 - Trim to precision
+ // in this case down to 3 chars
+ .testCase(STRING(), "abcdef", "abcdef")
+ .build(),
+ CastTestSpecBuilder.test(STRING(), "To STRING")
+ .testCase(CHAR(3), "foo", "foo")
+ .testCase(CHAR(5), "foo", "foo ")
+ .testCase(VARCHAR(5), "Flink", "Flink")
+ .testCase(VARCHAR(10), "Flink", "Flink")
+ .testCase(STRING(), "Apache Flink", "Apache Flink")
+ .testCase(STRING(), null, null)
+ .testCase(BOOLEAN(), true, "true")
Review comment:
This should be uppercase: `TRUE`.
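
For reference, a minimal sketch of the adjustment this comment suggests, written against the PR's CastTestSpecBuilder as it appears in the diff above. It assumes the CAST from BOOLEAN to a character type renders the uppercase SQL literals TRUE/FALSE, as the comment implies; the added FALSE case is illustrative only:

    CastTestSpecBuilder.test(STRING(), "To STRING")
            // expected strings are the uppercase SQL boolean literals
            .testCase(BOOLEAN(), true, "TRUE")
            .testCase(BOOLEAN(), false, "FALSE")
            .build()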