dtenedor commented on code in PR #46312: URL: https://github.com/apache/spark/pull/46312#discussion_r1586662927
########## sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/ResolveDefaultColumnsUtil.scala: ########## @@ -84,9 +84,16 @@ object ResolveDefaultColumns extends QueryErrorsBase if (SQLConf.get.enableDefaultColumns) { val newFields: Seq[StructField] = tableSchema.fields.map { field => if (field.metadata.contains(CURRENT_DEFAULT_COLUMN_METADATA_KEY)) { - val analyzed: Expression = analyze(field, statementType) + val defaultSql: String = if (field.dataType.isInstanceOf[VariantType]) { + // A variant's SQL/string representation is its JSON string which cannot be directly Review Comment: It sounds like we should implement the type coercion instead of taking this approach. Then we can correctly store the variant value as the column default expression rather than a string. ########## sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/util/ResolveDefaultColumnsUtil.scala: ########## @@ -84,9 +84,16 @@ object ResolveDefaultColumns extends QueryErrorsBase if (SQLConf.get.enableDefaultColumns) { val newFields: Seq[StructField] = tableSchema.fields.map { field => if (field.metadata.contains(CURRENT_DEFAULT_COLUMN_METADATA_KEY)) { - val analyzed: Expression = analyze(field, statementType) + val defaultSql: String = if (field.dataType.isInstanceOf[VariantType]) { Review Comment: You are changing an OSS Spark file in DBR. You will need EDGE comments to indicate the part of the code that is different in DBR vs. OSS; please read go/dbr-devguide for instructions. -- This is an automated message from the Apache Git Service. To respond to the message, please log on to GitHub and use the URL above to go to the specific comment. To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For queries about this service, please contact Infrastructure at: users@infra.apache.org --------------------------------------------------------------------- To unsubscribe, e-mail: reviews-unsubscribe@spark.apache.org For additional commands, e-mail: reviews-help@spark.apache.org