This is an automated email from the ASF dual-hosted git repository.
cancai pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/calcite.git
The following commit(s) were added to refs/heads/main by this push:
new 66d4ccc73a [CALCITE-6952] JDBC adapter for StarRocks generates
incorrect SQL for REAL datatype
66d4ccc73a is described below
commit 66d4ccc73acaa3eb89d7533f589f81fd5e3285c5
Author: Yu Xu <[email protected]>
AuthorDate: Wed May 21 11:38:42 2025 +0800
[CALCITE-6952] JDBC adapter for StarRocks generates incorrect SQL for REAL
datatype
---
.../calcite/sql/dialect/StarRocksSqlDialect.java | 84 +++++++++++++++-------
.../calcite/rel/rel2sql/RelToSqlConverterTest.java | 38 +++++++++-
2 files changed, 95 insertions(+), 27 deletions(-)
diff --git a/core/src/main/java/org/apache/calcite/sql/dialect/StarRocksSqlDialect.java b/core/src/main/java/org/apache/calcite/sql/dialect/StarRocksSqlDialect.java
index 84323542d6..eab4e20be9 100644
--- a/core/src/main/java/org/apache/calcite/sql/dialect/StarRocksSqlDialect.java
+++ b/core/src/main/java/org/apache/calcite/sql/dialect/StarRocksSqlDialect.java
@@ -30,10 +30,14 @@
import org.apache.calcite.sql.SqlDataTypeSpec;
import org.apache.calcite.sql.SqlDialect;
import org.apache.calcite.sql.SqlLiteral;
+import org.apache.calcite.sql.SqlMapTypeNameSpec;
import org.apache.calcite.sql.SqlNode;
import org.apache.calcite.sql.SqlWriter;
import org.apache.calcite.sql.fun.SqlFloorFunction;
import org.apache.calcite.sql.parser.SqlParserPos;
+import org.apache.calcite.sql.type.AbstractSqlType;
+import org.apache.calcite.sql.type.BasicSqlType;
+import org.apache.calcite.sql.type.MapSqlType;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.calcite.util.RelToSqlConverterUtil;
@@ -146,32 +150,62 @@ public StarRocksSqlDialect(Context context) {
}
@Override public @Nullable SqlNode getCastSpec(RelDataType type) {
- switch (type.getSqlTypeName()) {
- case INTEGER:
- return new SqlDataTypeSpec(
- new SqlAlienSystemTypeNameSpec(
- "INT",
- type.getSqlTypeName(),
- SqlParserPos.ZERO),
- SqlParserPos.ZERO);
- case BIGINT:
- return new SqlDataTypeSpec(
- new SqlBasicTypeNameSpec(SqlTypeName.BIGINT, SqlParserPos.ZERO),
- SqlParserPos.ZERO);
- case TIMESTAMP:
- return new SqlDataTypeSpec(
- new SqlAlienSystemTypeNameSpec(
- "DATETIME",
- type.getSqlTypeName(),
- SqlParserPos.ZERO),
- SqlParserPos.ZERO);
- case VARCHAR:
- return new SqlDataTypeSpec(
- new SqlBasicTypeNameSpec(SqlTypeName.VARCHAR, type.getPrecision(), SqlParserPos.ZERO),
- SqlParserPos.ZERO);
- default:
- return super.getCastSpec(type);
+ if (type instanceof BasicSqlType) {
+ switch (type.getSqlTypeName()) {
+ case INTEGER:
+ return new SqlDataTypeSpec(
+ new SqlAlienSystemTypeNameSpec(
+ "INT",
+ type.getSqlTypeName(),
+ SqlParserPos.ZERO),
+ SqlParserPos.ZERO);
+ case REAL:
+ return new SqlDataTypeSpec(
+ new SqlAlienSystemTypeNameSpec("FLOAT", type.getSqlTypeName(),
+ SqlParserPos.ZERO),
+ SqlParserPos.ZERO);
+ case BIGINT:
+ return new SqlDataTypeSpec(
+ new SqlBasicTypeNameSpec(SqlTypeName.BIGINT, SqlParserPos.ZERO),
+ SqlParserPos.ZERO);
+ case TIMESTAMP:
+ return new SqlDataTypeSpec(
+ new SqlAlienSystemTypeNameSpec(
+ "DATETIME",
+ type.getSqlTypeName(),
+ SqlParserPos.ZERO),
+ SqlParserPos.ZERO);
+ case VARCHAR:
+ return new SqlDataTypeSpec(
+ new SqlBasicTypeNameSpec(SqlTypeName.VARCHAR, type.getPrecision(), SqlParserPos.ZERO),
+ SqlParserPos.ZERO);
+ default:
+ break;
+ }
+ }
+
+ if (type instanceof AbstractSqlType) {
+ switch (type.getSqlTypeName()) {
+ case MAP:
+ MapSqlType mapSqlType = (MapSqlType) type;
+ SqlDataTypeSpec keySpec = (SqlDataTypeSpec) getCastSpec(mapSqlType.getKeyType());
+ SqlDataTypeSpec valueSpec =
+ (SqlDataTypeSpec) getCastSpec(mapSqlType.getValueType());
+ @SuppressWarnings("argument.type.incompatible")
+ SqlMapTypeNameSpec sqlMapTypeNameSpec =
+ new SqlMapTypeNameSpec(keySpec, valueSpec, SqlParserPos.ZERO);
+ return new SqlDataTypeSpec(sqlMapTypeNameSpec,
+ SqlParserPos.ZERO);
+ case ARRAY:
+ case MULTISET:
+ throw new UnsupportedOperationException("StarRocks dialect does not support cast to "
+ + type.getSqlTypeName());
+ default:
+ break;
+ }
}
+
+ return super.getCastSpec(type);
}
@Override public void unparseDateTimeLiteral(SqlWriter writer,
diff --git a/core/src/test/java/org/apache/calcite/rel/rel2sql/RelToSqlConverterTest.java b/core/src/test/java/org/apache/calcite/rel/rel2sql/RelToSqlConverterTest.java
index 04dd3894c1..e431129723 100644
--- a/core/src/test/java/org/apache/calcite/rel/rel2sql/RelToSqlConverterTest.java
+++ b/core/src/test/java/org/apache/calcite/rel/rel2sql/RelToSqlConverterTest.java
@@ -9918,7 +9918,9 @@ private void checkLiteral2(String expression, String expected) {
/** Test case for
 * <a href="https://issues.apache.org/jira/browse/CALCITE-6940">[CALCITE-6940]
- * Hive/Phoenix Dialect should not cast to REAL type directly</a>. */
+ * Hive/Phoenix Dialect should not cast to REAL type directly</a>,
+ * <a href="https://issues.apache.org/jira/browse/CALCITE-6952">[CALCITE-6952]
+ * JDBC adapter for StarRocks generates incorrect SQL for REAL datatype</a>. */
@Test void testRealTypesCast() {
String query = "SELECT CAST(\"department_id\" AS float), "
+ "CAST(\"department_id\" AS double), "
@@ -9932,16 +9934,20 @@ private void checkLiteral2(String expression, String expected) {
sql(query)
.withPhoenix().ok(expectedPhoenix)
+ .withStarRocks().ok(expectedHive)
.withHive().ok(expectedHive);
}
/** Test case for
 * <a href="https://issues.apache.org/jira/browse/CALCITE-6940">[CALCITE-6940]
- * Hive/Phoenix Dialect should not cast to REAL type directly</a>. */
+ * Hive/Phoenix Dialect should not cast to REAL type directly</a>,
+ * <a href="https://issues.apache.org/jira/browse/CALCITE-6952">[CALCITE-6952]
+ * JDBC adapter for StarRocks generates incorrect SQL for REAL datatype</a>. */
@Test void testRealNestedTypesCast() {
String query = "SELECT CAST(array[1,2,3] AS real array) FROM \"employee\"";
sql(query)
.withPhoenix().throws_("Phoenix dialect does not support cast to ARRAY")
+ .withStarRocks().throws_("StarRocks dialect does not support cast to ARRAY")
.withHive().throws_("Hive dialect does not support cast to ARRAY");
String query1 = "SELECT CAST(MAP[1.0,2.0,3.0,4.0] AS MAP<FLOAT, REAL>) FROM \"employee\"";
@@ -9952,7 +9958,35 @@ private void checkLiteral2(String expression, String expected) {
String query2 = "SELECT CAST(array[1,2,3] AS real multiset) FROM \"employee\"";
sql(query2)
.withPhoenix().throws_("Phoenix dialect does not support cast to MULTISET")
+ .withStarRocks().throws_("StarRocks dialect does not support cast to MULTISET")
.withHive().throws_("Hive dialect does not support cast to MULTISET");
+
+ String query3 = "SELECT CAST(MAP[1.0,2.0,3.0,4.0] AS MAP<FLOAT, REAL>) FROM \"employee\"";
+ String expectedStarRocks = "SELECT CAST(MAP { 1.0 : 2.0, 3.0 : 4.0 } AS MAP< FLOAT, FLOAT >)\n"
+ + "FROM `foodmart`.`employee`";
+ sql(query3)
+ .withStarRocks()
+ .ok(expectedStarRocks);
+
+ String query4 = "SELECT CAST(MAP[1.0,MAP[3.0,4.0]]"
+ + " AS MAP<FLOAT, MAP<REAL, REAL>>)"
+ + " FROM \"employee\"";
+ String expectedStarRocks1 = "SELECT CAST(MAP { 1.0 : MAP { 3.0 : 4.0 } }"
+ + " AS MAP< FLOAT, MAP< FLOAT, FLOAT > >)\n"
+ + "FROM `foodmart`.`employee`";
+ sql(query4)
+ .withStarRocks()
+ .ok(expectedStarRocks1);
+
+ String query5 = "SELECT CAST(\"department_id\" AS float), "
+ + "CAST(\"department_id\" AS double), "
+ + "CAST(\"department_id\" AS real) FROM \"employee\"";
+ String expectedStarRocks2 = "SELECT CAST(`department_id` AS FLOAT), "
+ + "CAST(`department_id` AS DOUBLE), "
+ + "CAST(`department_id` AS FLOAT)\nFROM `foodmart`.`employee`";
+ sql(query5)
+ .withStarRocks()
+ .ok(expectedStarRocks2);
}
@Test void testAntiJoinWithComplexInput2() {