This is an automated email from the ASF dual-hosted git repository.

yao pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/master by this push:
     new 6e89fa9e8945 Revert "[SPARK-51905][SQL] Disallow NOT ENFORCED CHECK constraint"
6e89fa9e8945 is described below

commit 6e89fa9e8945ff31c135c40274a13be6dc2e9e7a
Author: Kent Yao <y...@apache.org>
AuthorDate: Tue May 6 16:34:24 2025 +0800

    Revert "[SPARK-51905][SQL] Disallow NOT ENFORCED CHECK constraint"

    This reverts commit 61a6dee3ed4e0773454198c80a639a97de0df2af.
---
 .../sql/catalyst/expressions/constraints.scala     | 14 +---
 .../command/CheckConstraintParseSuite.scala        | 83 ----------------------
 .../command/ConstraintParseSuiteBase.scala         | 20 ++----
 .../command/ForeignKeyConstraintParseSuite.scala   |  4 --
 .../command/PrimaryKeyConstraintParseSuite.scala   |  2 -
 .../command/UniqueConstraintParseSuite.scala       |  3 -
 6 files changed, 7 insertions(+), 119 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/constraints.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/constraints.scala
index 8dad8285d50a..15b8b7e2e731 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/constraints.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/constraints.scala
@@ -150,20 +150,8 @@ case class CheckConstraint(
 
   override def withTableName(tableName: String): TableConstraint = copy(tableName = tableName)
 
-  override def withUserProvidedCharacteristic(c: ConstraintCharacteristic): TableConstraint = {
-    if (c.enforced.contains(false)) {
-      val origin = CurrentOrigin.get
-      throw new ParseException(
-        command = origin.sqlText,
-        start = origin,
-        errorClass = "UNSUPPORTED_CONSTRAINT_CHARACTERISTIC",
-        messageParameters = Map(
-          "characteristic" -> "NOT ENFORCED",
-          "constraintType" -> "CHECK")
-      )
-    }
+  override def withUserProvidedCharacteristic(c: ConstraintCharacteristic): TableConstraint =
     copy(userProvidedCharacteristic = c)
-  }
 }
 
 // scalastyle:off line.size.limit
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/CheckConstraintParseSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/CheckConstraintParseSuite.scala
index cfae1ea31e0d..2df42b1b429e 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/CheckConstraintParseSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/CheckConstraintParseSuite.scala
@@ -316,87 +316,4 @@ class CheckConstraintParseSuite extends ConstraintParseSuiteBase {
     }
   }
 
-  test("NOT ENFORCED is not supported for CHECK -- table level") {
-    notEnforcedConstraintCharacteristics.foreach { case (c1, c2, _) =>
-      val characteristic = if (c2.isEmpty) {
-        c1
-      } else {
-        s"$c1 $c2"
-      }
-      val sql =
-        s"""
-           |CREATE TABLE a.b.t (a INT, b STRING, CONSTRAINT C1 CHECK (a > 0) $characteristic)
-           |""".stripMargin
-
-      val expectedContext = ExpectedContext(
-        fragment = s"CONSTRAINT C1 CHECK (a > 0) $characteristic"
-      )
-
-      checkError(
-        exception = intercept[ParseException] {
-          parsePlan(sql)
-        },
-        condition = "UNSUPPORTED_CONSTRAINT_CHARACTERISTIC",
-        parameters = Map(
-          "characteristic" -> "NOT ENFORCED",
-          "constraintType" -> "CHECK"),
-        queryContext = Array(expectedContext))
-    }
-  }
-
-  test("NOT ENFORCED is not supported for CHECK -- column level") {
-    notEnforcedConstraintCharacteristics.foreach { case (c1, c2, _) =>
-      val characteristic = if (c2.isEmpty) {
-        c1
-      } else {
-        s"$c1 $c2"
-      }
-      val sql =
-        s"""
-           |CREATE TABLE a.b.t (a INT CHECK (a > 0) $characteristic, b STRING)
-           |""".stripMargin
-
-      val expectedContext = ExpectedContext(
-        fragment = s"CHECK (a > 0) $characteristic"
-      )
-
-      checkError(
-        exception = intercept[ParseException] {
-          parsePlan(sql)
-        },
-        condition = "UNSUPPORTED_CONSTRAINT_CHARACTERISTIC",
-        parameters = Map(
-          "characteristic" -> "NOT ENFORCED",
-          "constraintType" -> "CHECK"),
-        queryContext = Array(expectedContext))
-    }
-  }
-
-  test("NOT ENFORCED is not supported for CHECK -- ALTER TABLE") {
-    notEnforcedConstraintCharacteristics.foreach { case (c1, c2, _) =>
-      val characteristic = if (c2.isEmpty) {
-        c1
-      } else {
-        s"$c1 $c2"
-      }
-      val sql =
-        s"""
-           |ALTER TABLE a.b.t ADD CONSTRAINT C1 CHECK (a > 0) $characteristic
-           |""".stripMargin
-
-      val expectedContext = ExpectedContext(
-        fragment = s"CONSTRAINT C1 CHECK (a > 0) $characteristic"
-      )
-
-      checkError(
-        exception = intercept[ParseException] {
-          parsePlan(sql)
-        },
-        condition = "UNSUPPORTED_CONSTRAINT_CHARACTERISTIC",
-        parameters = Map(
-          "characteristic" -> "NOT ENFORCED",
-          "constraintType" -> "CHECK"),
-        queryContext = Array(expectedContext))
-    }
-  }
 }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ConstraintParseSuiteBase.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ConstraintParseSuiteBase.scala
index dadc791138ac..8b5ddb506f78 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ConstraintParseSuiteBase.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ConstraintParseSuiteBase.scala
@@ -26,8 +26,13 @@ import org.apache.spark.sql.types.{IntegerType, StringType}
 abstract class ConstraintParseSuiteBase extends AnalysisTest with SharedSparkSession {
   protected def validConstraintCharacteristics = Seq(
     ("", "", ConstraintCharacteristic(enforced = None, rely = None)),
+    ("NOT ENFORCED", "", ConstraintCharacteristic(enforced = Some(false), rely = None)),
     ("", "RELY", ConstraintCharacteristic(enforced = None, rely = Some(true))),
-    ("", "NORELY", ConstraintCharacteristic(enforced = None, rely = Some(false)))
+    ("", "NORELY", ConstraintCharacteristic(enforced = None, rely = Some(false))),
+    ("NOT ENFORCED", "RELY",
+      ConstraintCharacteristic(enforced = Some(false), rely = Some(true))),
+    ("NOT ENFORCED", "NORELY",
+      ConstraintCharacteristic(enforced = Some(false), rely = Some(false)))
   )
 
   protected def enforcedConstraintCharacteristics = Seq(
@@ -38,19 +43,6 @@ abstract class ConstraintParseSuiteBase extends AnalysisTest with SharedSparkSes
     ("NORELY", "ENFORCED", ConstraintCharacteristic(enforced = Some(true), rely = Some(false)))
   )
 
-  protected def notEnforcedConstraintCharacteristics = Seq(
-    ("NOT ENFORCED", "RELY",
-      ConstraintCharacteristic(enforced = Some(false), rely = Some(true))),
-    ("NOT ENFORCED", "NORELY",
-      ConstraintCharacteristic(enforced = Some(false), rely = Some(false))),
-    ("RELY", "NOT ENFORCED",
-      ConstraintCharacteristic(enforced = Some(false), rely = Some(true))),
-    ("NORELY", "NOT ENFORCED",
-      ConstraintCharacteristic(enforced = Some(false), rely = Some(false))),
-    ("NOT ENFORCED", "",
-      ConstraintCharacteristic(enforced = Some(false), rely = None))
-  )
-
   protected val invalidConstraintCharacteristics = Seq(
     ("ENFORCED", "ENFORCED"),
     ("ENFORCED", "NOT ENFORCED"),
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ForeignKeyConstraintParseSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ForeignKeyConstraintParseSuite.scala
index 4b89611a8b87..a4736555a24e 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ForeignKeyConstraintParseSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/ForeignKeyConstraintParseSuite.scala
@@ -23,10 +23,6 @@ import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.catalyst.plans.logical.AddConstraint
 
 class ForeignKeyConstraintParseSuite extends ConstraintParseSuiteBase {
-
-  override val validConstraintCharacteristics =
-    super.validConstraintCharacteristics ++ notEnforcedConstraintCharacteristics
-
   test("Create table with foreign key - table level") {
     val sql = "CREATE TABLE t (a INT, b STRING," +
       " FOREIGN KEY (a) REFERENCES parent(id)) USING parquet"
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PrimaryKeyConstraintParseSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PrimaryKeyConstraintParseSuite.scala
index 711db63ad424..fae7ed14de7b 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PrimaryKeyConstraintParseSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/PrimaryKeyConstraintParseSuite.scala
@@ -24,8 +24,6 @@ import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.catalyst.plans.logical.AddConstraint
 
 class PrimaryKeyConstraintParseSuite extends ConstraintParseSuiteBase {
-  override val validConstraintCharacteristics =
-    super.validConstraintCharacteristics ++ notEnforcedConstraintCharacteristics
 
   test("Create table with primary key - table level") {
     val sql = "CREATE TABLE t (a INT, b STRING, PRIMARY KEY (a)) USING parquet"
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/UniqueConstraintParseSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/UniqueConstraintParseSuite.scala
index 4f07ffa08736..704b8417cd55 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/UniqueConstraintParseSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/UniqueConstraintParseSuite.scala
@@ -23,9 +23,6 @@ import org.apache.spark.sql.catalyst.parser.ParseException
 import org.apache.spark.sql.catalyst.plans.logical.AddConstraint
 
 class UniqueConstraintParseSuite extends ConstraintParseSuiteBase {
-  override val validConstraintCharacteristics =
-    super.validConstraintCharacteristics ++ notEnforcedConstraintCharacteristics
-
   test("Create table with unnamed unique constraint") {
     Seq(
       "CREATE TABLE t (a INT, b STRING, UNIQUE (a)) USING parquet",

---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org