This is an automated email from the ASF dual-hosted git repository.

gengliang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 8b6b28b5af3b [SPARK-52141][SQL] Display constraints in DESC commands
8b6b28b5af3b is described below

commit 8b6b28b5af3b0b548e7e2d2914d08cdf3981689e
Author: dengziming <dengzim...@bytedance.com>
AuthorDate: Fri Jul 25 10:27:11 2025 -0700

    [SPARK-52141][SQL] Display constraints in DESC commands
    
    ### What changes were proposed in this pull request?
    Show constraints in `desc extended table`
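
    For example (a minimal sketch; the namespace, table, and constraint names are
    illustrative, taken from the new test in `DescribeTableSuite` below):

        spark.sql("DESCRIBE EXTENDED ns.pk_table").show(truncate = false)
        // ...existing sections, then a new one (col_name and data_type columns):
        // # Constraints
        // pk_table_pk  PRIMARY KEY (id) NOT ENFORCED
        // fk_a         FOREIGN KEY (a) REFERENCES fk_table (id) NOT ENFORCED RELY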
    
    ### Why are the changes needed?
    Constraint information is part of the SQL standard and is among the changes listed in the SPIP.
    
    ### Does this PR introduce _any_ user-facing change?
    Yes, constraint information is added to the DESC command output.
    
    ### How was this patch tested?
    1. For the `desc extended table` command, I added a test case in `DescribeTableSuite`.
    2. Changing the SQL in `describe.sql` would not help, because it uses a v1 table, and constraints are only supported in v2 tables.
    
    ### Was this patch authored or co-authored using generative AI tooling?
    No.
    
    Closes #51577 from dengziming/SPARK-52141.
    
    Lead-authored-by: dengziming <dengzim...@bytedance.com>
    Co-authored-by: Gengliang Wang <gengli...@apache.org>
    Signed-off-by: Gengliang Wang <gengli...@apache.org>
---
 .../catalog/constraints/BaseConstraint.java        | 10 ++++
 .../connector/catalog/constraints/Constraint.java  |  6 +++
 .../datasources/v2/DescribeTableExec.scala         | 11 +++++
 .../execution/command/v2/DescribeTableSuite.scala  | 55 ++++++++++++++++++++++
 4 files changed, 82 insertions(+)

diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/constraints/BaseConstraint.java b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/constraints/BaseConstraint.java
index 6f9ee47175ac..f93d716a2784 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/constraints/BaseConstraint.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/constraints/BaseConstraint.java
@@ -73,6 +73,16 @@ abstract class BaseConstraint implements Constraint {
         rely ? "RELY" : "NORELY");
   }
 
+  public String toDescription() {
+    StringJoiner joiner = new StringJoiner(" ");
+    joiner.add(definition());
+    joiner.add(enforced ? "ENFORCED" : "NOT ENFORCED");
+    if (rely) {
+      joiner.add("RELY");
+    }
+    return joiner.toString();
+  }
+
   @Override
   public String toString() {
     return toDDL();
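
For reference, a self-contained Scala sketch of the formatting rule that
`toDescription()` applies (it mirrors the Java logic above; the helper name
`describe` is illustrative, not part of the Spark API):

    import java.util.StringJoiner

    // Definition first, then the enforcement token; RELY is appended only when
    // it is the non-default value (the default NORELY is omitted here, unlike
    // in toDDL()).
    def describe(definition: String, enforced: Boolean, rely: Boolean): String = {
      val joiner = new StringJoiner(" ")
      joiner.add(definition)
      joiner.add(if (enforced) "ENFORCED" else "NOT ENFORCED")
      if (rely) joiner.add("RELY")
      joiner.toString
    }

    describe("CHECK (id > 0)", enforced = true, rely = false)
    // => "CHECK (id > 0) ENFORCED"
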
diff --git a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/constraints/Constraint.java b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/constraints/Constraint.java
index c3a2cd73e9ab..93ffb22422e6 100644
--- a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/constraints/Constraint.java
+++ b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/constraints/Constraint.java
@@ -58,6 +58,12 @@ public interface Constraint {
    */
   String toDDL();
 
+  /**
+   * Returns the constraint description for DESCRIBE TABLE output, excluding the constraint
+   * name (shown separately).
+   */
+  String toDescription();
+
   /**
    * Instantiates a builder for a CHECK constraint.
    *
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DescribeTableExec.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DescribeTableExec.scala
index 4e01b9f23f60..3cc767380d8b 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DescribeTableExec.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DescribeTableExec.scala
@@ -45,6 +45,7 @@ case class DescribeTableExec(
       addMetadataColumns(rows)
       addTableDetails(rows)
       addTableStats(rows)
+      addTableConstraints(rows)
     }
     rows.toSeq
   }
@@ -88,6 +89,16 @@ case class DescribeTableExec(
     }
   }
 
+  private def addTableConstraints(rows: ArrayBuffer[InternalRow]): Unit = {
+    if (table.constraints.nonEmpty) {
+      rows += emptyRow()
+      rows += toCatalystRow("# Constraints", "", "")
+      rows ++= table.constraints().map { constraint =>
+        toCatalystRow(constraint.name(), constraint.toDescription, "")
+      }
+    }
+  }
+
   private def addMetadataColumns(rows: ArrayBuffer[InternalRow]): Unit = table match {
     case hasMeta: SupportsMetadataColumns if hasMeta.metadataColumns.nonEmpty =>
       rows += emptyRow()
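
The rows appended by `addTableConstraints` follow the existing three-column
DESCRIBE layout. A sketch of the section it produces, with values taken from
the `DescribeTableSuite` expectations below:

    // One header row, then one (col_name, data_type, comment) row per
    // constraint; the comment column is left empty.
    val constraintRows = Seq(
      ("# Constraints", "", ""),
      ("pk_table_pk", "PRIMARY KEY (id) NOT ENFORCED", ""),
      ("c2", "CHECK (id > 0) ENFORCED", "")
    )
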
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/DescribeTableSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/DescribeTableSuite.scala
index 9cd7f0d8aade..89c613389eb2 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/DescribeTableSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/DescribeTableSuite.scala
@@ -213,4 +213,59 @@ class DescribeTableSuite extends command.DescribeTableSuiteBase
       assert("""\d+\s+bytes,\s+4\s+rows""".r.matches(stats))
     }
   }
+
+  test("desc table constraints") {
+    withNamespaceAndTable("ns", "pk_table", nonPartitionCatalog) { tbl =>
+      withTable("fk_table") {
+        sql(
+          s"""
+             |CREATE TABLE fk_table (id INT PRIMARY KEY) USING parquet
+        """.stripMargin)
+        sql(
+          s"""
+             |CREATE TABLE $tbl (
+             |  id INT,
+             |  a INT,
+             |  b STRING,
+             |  c STRING,
+             |  PRIMARY KEY (id),
+             |  CONSTRAINT fk_a FOREIGN KEY (a) REFERENCES fk_table(id) RELY,
+             |  CONSTRAINT uk_b UNIQUE (b),
+             |  CONSTRAINT uk_a_c UNIQUE (a, c),
+             |  CONSTRAINT c1 CHECK (c IS NOT NULL),
+             |  CONSTRAINT c2 CHECK (id > 0)
+             |)
+             |$defaultUsing
+        """.stripMargin)
+
+        // NORELY is not shown since it is the default value.
+        var expectedConstraintsDdl = Array(
+          "# Constraints,,",
+          "pk_table_pk,PRIMARY KEY (id) NOT ENFORCED,",
+          "fk_a,FOREIGN KEY (a) REFERENCES fk_table (id) NOT ENFORCED RELY,",
+          "uk_b,UNIQUE (b) NOT ENFORCED,",
+          "uk_a_c,UNIQUE (a, c) NOT ENFORCED,",
+          "c1,CHECK (c IS NOT NULL) ENFORCED,",
+          "c2,CHECK (id > 0) ENFORCED,"
+        )
+        var descDdL = sql(s"DESCRIBE EXTENDED $tbl").collect().map(_.mkString(","))
+          .dropWhile(_ != "# Constraints,,")
+        assert(descDdL === expectedConstraintsDdl)
+
+        // Show non-default value for RELY.
+        sql(s"ALTER TABLE $tbl ADD CONSTRAINT c3 CHECK (b IS NOT NULL) RELY")
+        descDdL = sql(s"DESCRIBE EXTENDED $tbl").collect().map(_.mkString(","))
+          .dropWhile(_ != "# Constraints,,")
+        expectedConstraintsDdl = expectedConstraintsDdl ++
+          Array("c3,CHECK (b IS NOT NULL) ENFORCED RELY,")
+        assert(descDdL === expectedConstraintsDdl)
+
+        sql(s"ALTER TABLE $tbl DROP CONSTRAINT c1")
+        descDdL = sql(s"DESCRIBE EXTENDED $tbl").collect().map(_.mkString(","))
+          .dropWhile(_ != "# Constraints,,")
+        assert(descDdL === expectedConstraintsDdl
+          .filter(_ != "c1,CHECK (c IS NOT NULL) ENFORCED,"))
+      }
+    }
+  }
 }

