mihailom-db commented on code in PR #48883:
URL: https://github.com/apache/spark/pull/48883#discussion_r1847782313
##########
sql/api/src/main/scala/org/apache/spark/sql/types/DataType.scala:
##########
@@ -458,6 +458,10 @@ object DataType {
private[sql] def equalsIgnoreCompatibleCollation(from: DataType, to:
DataType): Boolean = {
(from, to) match {
// String types with possibly different collations are compatible.
+ case (CharType(l1), CharType(l2)) => l1 == l2
+ case (VarcharType(l1), VarcharType(l2)) => l1 == l2
+ case (CharType(_) | VarcharType(_), _: StringType) => false
+ case (_: StringType, CharType(_) | VarcharType(_)) => false
case (_: StringType, _: StringType) => true
Review Comment:
@stefankandic does this seem OK to you? I do not want to break Delta. Is
this only used in DDL operations?
##########
sql/api/src/main/scala/org/apache/spark/sql/types/VarcharType.scala:
##########
@@ -16,14 +16,24 @@
*/
package org.apache.spark.sql.types
+import org.json4s.JsonAST.{JString, JValue}
+
import org.apache.spark.annotation.Experimental
@Experimental
-case class VarcharType(length: Int) extends AtomicType {
+case class VarcharType(length: Int) extends StringType(0, Some(length)) {
Review Comment:
ditto
##########
sql/api/src/main/scala/org/apache/spark/sql/types/CharType.scala:
##########
@@ -17,14 +17,24 @@
package org.apache.spark.sql.types
+import org.json4s.JsonAST.{JString, JValue}
+
import org.apache.spark.annotation.Experimental
@Experimental
-case class CharType(length: Int) extends AtomicType {
+case class CharType(length: Int) extends StringType(0, Some(length)) {
Review Comment:
Let's not put 0 here, but the UTF8_BINARY constant
##########
sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/literals.scala:
##########
@@ -196,6 +196,8 @@ object Literal {
case TimestampNTZType => create(0L, TimestampNTZType)
case it: DayTimeIntervalType => create(0L, it)
case it: YearMonthIntervalType => create(0, it)
+ case CharType(_) | VarcharType(_) =>
+ throw QueryExecutionErrors.noDefaultForDataTypeError(dataType)
Review Comment:
Why not add a default value for these types?
##########
sql/api/src/main/scala/org/apache/spark/sql/types/StringType.scala:
##########
@@ -101,7 +107,7 @@ class StringType private (val collationId: Int) extends
AtomicType with Serializ
* @since 1.3.0
*/
@Stable
-case object StringType extends StringType(0) {
+case object StringType extends StringType(0, None) {
Review Comment:
Could you replace the 0 here with the UTF8_BINARY constant as well?
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]