This is an automated email from the ASF dual-hosted git repository.

gengliang pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 984a2ec6eec3 [SPARK-52142][SQL] Display table constraints in SHOW 
CREATE TABLE COMMAND
984a2ec6eec3 is described below

commit 984a2ec6eec31a162ed7eb64b48ed9bf24876b36
Author: Gengliang Wang <gengli...@apache.org>
AuthorDate: Fri Jul 18 21:04:12 2025 -0700

    [SPARK-52142][SQL] Display table constraints in SHOW CREATE TABLE COMMAND
    
    ### What changes were proposed in this pull request?
    
    Display table constraints in the SHOW CREATE TABLE COMMAND
    
    ### Why are the changes needed?
    
    Displaying table constraints is expected for the SHOW CREATE TABLE COMMAND
    
    ### Does this PR introduce _any_ user-facing change?
    
    No
    
    ### How was this patch tested?
    
    Unit test

    ### Was this patch authored or co-authored using generative AI tooling?
    
    No
    
    Closes #51541 from gengliangwang/showCreateTable.
    
    Authored-by: Gengliang Wang <gengli...@apache.org>
    Signed-off-by: Gengliang Wang <gengli...@apache.org>
---
 .../catalog/constraints/BaseConstraint.java        |  5 +-
 .../sql/connector/catalog/ConstraintSuite.scala    | 30 ++++++++----
 .../datasources/v2/ShowCreateTableExec.scala       |  3 +-
 .../command/v2/CheckConstraintSuite.scala          | 24 +++++-----
 .../command/v2/ForeignKeyConstraintSuite.scala     | 16 +++----
 .../command/v2/PrimaryKeyConstraintSuite.scala     | 16 +++----
 .../command/v2/ShowCreateTableSuite.scala          | 56 ++++++++++++++++++++++
 .../command/v2/UniqueConstraintSuite.scala         | 16 +++----
 8 files changed, 117 insertions(+), 49 deletions(-)

diff --git 
a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/constraints/BaseConstraint.java
 
b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/constraints/BaseConstraint.java
index 28791a9f3a58..6f9ee47175ac 100644
--- 
a/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/constraints/BaseConstraint.java
+++ 
b/sql/catalyst/src/main/java/org/apache/spark/sql/connector/catalog/constraints/BaseConstraint.java
@@ -63,12 +63,13 @@ abstract class BaseConstraint implements Constraint {
 
   @Override
   public String toDDL() {
+    // The validation status is not included in the DDL output as it's not 
part of
+    // the Spark SQL syntax for constraints.
     return String.format(
-        "CONSTRAINT %s %s %s %s %s",
+        "CONSTRAINT %s %s %s %s",
         name,
         definition(),
         enforced ? "ENFORCED" : "NOT ENFORCED",
-        validationStatus,
         rely ? "RELY" : "NORELY");
   }
 
diff --git 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/ConstraintSuite.scala
 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/ConstraintSuite.scala
index 2d11bedb396f..d63e3095a2ef 100644
--- 
a/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/ConstraintSuite.scala
+++ 
b/sql/catalyst/src/test/scala/org/apache/spark/sql/connector/catalog/ConstraintSuite.scala
@@ -33,7 +33,8 @@ class ConstraintSuite extends SparkFunSuite {
       .validationStatus(ValidationStatus.VALID)
       .rely(true)
       .build()
-    assert(con1.toDDL == "CONSTRAINT con1 CHECK (id > 10) ENFORCED VALID RELY")
+    assert(con1.toDDL == "CONSTRAINT con1 CHECK (id > 10) ENFORCED RELY")
+    assert(con1.validationStatus() == ValidationStatus.VALID)
 
     val con2 = Constraint.check("con2")
     .predicate(
@@ -46,7 +47,8 @@ class ConstraintSuite extends SparkFunSuite {
       .validationStatus(ValidationStatus.VALID)
       .rely(true)
       .build()
-    assert(con2.toDDL == "CONSTRAINT con2 CHECK (a.`b.c`.d = 1) NOT ENFORCED 
VALID RELY")
+    assert(con2.toDDL == "CONSTRAINT con2 CHECK (a.`b.c`.d = 1) NOT ENFORCED 
RELY")
+    assert(con2.validationStatus() == ValidationStatus.VALID)
 
     val con3 = Constraint.check("con3")
       .predicateSql("a.b.c <=> 1")
@@ -60,10 +62,12 @@ class ConstraintSuite extends SparkFunSuite {
       .validationStatus(ValidationStatus.INVALID)
       .rely(false)
       .build()
-    assert(con3.toDDL == "CONSTRAINT con3 CHECK (a.b.c <=> 1) NOT ENFORCED 
INVALID NORELY")
+    assert(con3.toDDL == "CONSTRAINT con3 CHECK (a.b.c <=> 1) NOT ENFORCED 
NORELY")
+    assert(con3.validationStatus() == ValidationStatus.INVALID)
 
     val con4 = Constraint.check("con4").predicateSql("a = 1").build()
-    assert(con4.toDDL == "CONSTRAINT con4 CHECK (a = 1) ENFORCED UNVALIDATED 
NORELY")
+    assert(con4.toDDL == "CONSTRAINT con4 CHECK (a = 1) ENFORCED NORELY")
+    assert(con4.validationStatus() == ValidationStatus.UNVALIDATED)
   }
 
   test("UNIQUE constraint toDDL") {
@@ -74,7 +78,8 @@ class ConstraintSuite extends SparkFunSuite {
       .validationStatus(ValidationStatus.UNVALIDATED)
       .rely(true)
       .build()
-    assert(con1.toDDL == "CONSTRAINT con1 UNIQUE (a.b.c, d) NOT ENFORCED 
UNVALIDATED RELY")
+    assert(con1.toDDL == "CONSTRAINT con1 UNIQUE (a.b.c, d) NOT ENFORCED RELY")
+    assert(con1.validationStatus() == ValidationStatus.UNVALIDATED)
 
     val con2 = Constraint.unique(
         "con2",
@@ -83,7 +88,8 @@ class ConstraintSuite extends SparkFunSuite {
       .validationStatus(ValidationStatus.VALID)
       .rely(true)
       .build()
-    assert(con2.toDDL == "CONSTRAINT con2 UNIQUE (`a.b`.x.y, d) NOT ENFORCED 
VALID RELY")
+    assert(con2.toDDL == "CONSTRAINT con2 UNIQUE (`a.b`.x.y, d) NOT ENFORCED 
RELY")
+    assert(con2.validationStatus() == ValidationStatus.VALID)
   }
 
   test("PRIMARY KEY constraint toDDL") {
@@ -94,7 +100,8 @@ class ConstraintSuite extends SparkFunSuite {
       .validationStatus(ValidationStatus.VALID)
       .rely(true)
       .build()
-    assert(pk1.toDDL == "CONSTRAINT pk1 PRIMARY KEY (a.b.c, d) ENFORCED VALID 
RELY")
+    assert(pk1.toDDL == "CONSTRAINT pk1 PRIMARY KEY (a.b.c, d) ENFORCED RELY")
+    assert(pk1.validationStatus() == ValidationStatus.VALID)
 
     val pk2 = Constraint.primaryKey(
         "pk2",
@@ -103,7 +110,8 @@ class ConstraintSuite extends SparkFunSuite {
       .validationStatus(ValidationStatus.INVALID)
       .rely(false)
       .build()
-    assert(pk2.toDDL == "CONSTRAINT pk2 PRIMARY KEY (`x.y`.z, id) NOT ENFORCED 
INVALID NORELY")
+    assert(pk2.toDDL == "CONSTRAINT pk2 PRIMARY KEY (`x.y`.z, id) NOT ENFORCED 
NORELY")
+    assert(pk2.validationStatus() == ValidationStatus.INVALID)
   }
 
   test("FOREIGN KEY constraint toDDL") {
@@ -118,7 +126,8 @@ class ConstraintSuite extends SparkFunSuite {
       .build()
     assert(fk1.toDDL == "CONSTRAINT fk1 FOREIGN KEY (col1, col2) " +
       "REFERENCES schema.table (ref_col1, ref_col2) " +
-      "ENFORCED VALID RELY")
+      "ENFORCED RELY")
+    assert(fk1.validationStatus() == ValidationStatus.VALID)
 
     val fk2 = Constraint.foreignKey(
         "fk2",
@@ -131,6 +140,7 @@ class ConstraintSuite extends SparkFunSuite {
       .build()
     assert(fk2.toDDL == "CONSTRAINT fk2 FOREIGN KEY (`x.y`.z) " +
       "REFERENCES other_table (other_id) " +
-      "NOT ENFORCED INVALID NORELY")
+      "NOT ENFORCED NORELY")
+    assert(fk2.validationStatus() == ValidationStatus.INVALID)
   }
 }
diff --git 
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowCreateTableExec.scala
 
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowCreateTableExec.scala
index 4195560c5cc1..56e786d3e933 100644
--- 
a/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowCreateTableExec.scala
+++ 
b/sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ShowCreateTableExec.scala
@@ -65,7 +65,8 @@ case class ShowCreateTableExec(
   private def showTableDataColumns(table: Table, builder: StringBuilder): Unit 
= {
     import org.apache.spark.sql.connector.catalog.CatalogV2Implicits._
     val columns = CharVarcharUtils.getRawSchema(table.columns.asSchema, 
conf).fields.map(_.toDDL)
-    builder ++= concatByMultiLines(columns)
+    val constraints = table.constraints().map(_.toDDL)
+    builder ++= concatByMultiLines(columns ++ constraints)
   }
 
   private def showTableUsing(table: Table, builder: StringBuilder): Unit = {
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/CheckConstraintSuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/CheckConstraintSuite.scala
index 397d9248f628..de03653fc916 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/CheckConstraintSuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/CheckConstraintSuite.scala
@@ -134,7 +134,7 @@ class CheckConstraintSuite extends QueryTest with 
CommandSuiteBase with DDLComma
       val constraint = getCheckConstraint(table)
       assert(constraint.name() == "c1")
       assert(constraint.toDDL ==
-        "CONSTRAINT c1 CHECK (from_json(j, 'a INT').a > 1) ENFORCED VALID 
NORELY")
+        "CONSTRAINT c1 CHECK (from_json(j, 'a INT').a > 1) ENFORCED NORELY")
       assert(constraint.predicateSql() == "from_json(j, 'a INT').a > 1")
       assert(constraint.predicate() == null)
     }
@@ -142,12 +142,12 @@ class CheckConstraintSuite extends QueryTest with 
CommandSuiteBase with DDLComma
 
   def getConstraintCharacteristics(): Seq[(String, String)] = {
     Seq(
-      ("", s"ENFORCED VALID NORELY"),
-      ("NORELY", s"ENFORCED VALID NORELY"),
-      ("RELY", s"ENFORCED VALID RELY"),
-      ("ENFORCED", s"ENFORCED VALID NORELY"),
-      ("ENFORCED NORELY", s"ENFORCED VALID NORELY"),
-      ("ENFORCED RELY", s"ENFORCED VALID RELY")
+      ("", s"ENFORCED NORELY"),
+      ("NORELY", s"ENFORCED NORELY"),
+      ("RELY", s"ENFORCED RELY"),
+      ("ENFORCED", s"ENFORCED NORELY"),
+      ("ENFORCED NORELY", s"ENFORCED NORELY"),
+      ("ENFORCED RELY", s"ENFORCED RELY")
     )
   }
 
@@ -176,7 +176,7 @@ class CheckConstraintSuite extends QueryTest with 
CommandSuiteBase with DDLComma
             val constraint = getCheckConstraint(table)
             assert(constraint.name() == "c1")
             assert(constraint.toDDL ==
-              s"CONSTRAINT c1 CHECK (LENGTH(name) > 0) ENFORCED VALID NORELY")
+              s"CONSTRAINT c1 CHECK (LENGTH(name) > 0) ENFORCED NORELY")
             assert(constraint.predicateSql() == "LENGTH(name) > 0")
           }
         }
@@ -258,7 +258,7 @@ class CheckConstraintSuite extends QueryTest with 
CommandSuiteBase with DDLComma
       val constraint = getCheckConstraint(table)
       assert(constraint.name() == "valid_positive_num")
       assert(constraint.toDDL ==
-        "CONSTRAINT valid_positive_num CHECK (s.num >= -1) ENFORCED VALID 
NORELY")
+        "CONSTRAINT valid_positive_num CHECK (s.num >= -1) ENFORCED NORELY")
     }
   }
 
@@ -288,7 +288,7 @@ class CheckConstraintSuite extends QueryTest with 
CommandSuiteBase with DDLComma
       val constraint = getCheckConstraint(table)
       assert(constraint.name() == "valid_map_val")
       assert(constraint.toDDL ==
-        "CONSTRAINT valid_map_val CHECK (m['a'] >= -1) ENFORCED VALID NORELY")
+        "CONSTRAINT valid_map_val CHECK (m['a'] >= -1) ENFORCED NORELY")
     }
   }
 
@@ -316,7 +316,7 @@ class CheckConstraintSuite extends QueryTest with 
CommandSuiteBase with DDLComma
       val constraint = getCheckConstraint(table)
       assert(constraint.name() == "valid_array")
       assert(constraint.toDDL ==
-        "CONSTRAINT valid_array CHECK (a[1] >= -2) ENFORCED VALID NORELY")
+        "CONSTRAINT valid_array CHECK (a[1] >= -2) ENFORCED NORELY")
     }
   }
 
@@ -336,7 +336,7 @@ class CheckConstraintSuite extends QueryTest with 
CommandSuiteBase with DDLComma
           condition = "CONSTRAINT_ALREADY_EXISTS",
           sqlState = "42710",
           parameters = Map("constraintName" -> "abc",
-            "oldConstraint" -> "CONSTRAINT abc CHECK (id > 0) ENFORCED VALID 
NORELY")
+            "oldConstraint" -> "CONSTRAINT abc CHECK (id > 0) ENFORCED NORELY")
         )
       }
     }
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ForeignKeyConstraintSuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ForeignKeyConstraintSuite.scala
index 02646a3cfcbb..a876013490ea 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ForeignKeyConstraintSuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ForeignKeyConstraintSuite.scala
@@ -23,12 +23,12 @@ class ForeignKeyConstraintSuite extends QueryTest with 
CommandSuiteBase with DDL
   override protected def command: String = "FOREIGN KEY CONSTRAINT"
 
   private val validConstraintCharacteristics = Seq(
-    ("", "NOT ENFORCED UNVALIDATED NORELY"),
-    ("NOT ENFORCED", "NOT ENFORCED UNVALIDATED NORELY"),
-    ("NOT ENFORCED NORELY", "NOT ENFORCED UNVALIDATED NORELY"),
-    ("NORELY NOT ENFORCED", "NOT ENFORCED UNVALIDATED NORELY"),
-    ("NORELY", "NOT ENFORCED UNVALIDATED NORELY"),
-    ("RELY", "NOT ENFORCED UNVALIDATED RELY")
+    ("", "NOT ENFORCED NORELY"),
+    ("NOT ENFORCED", "NOT ENFORCED NORELY"),
+    ("NOT ENFORCED NORELY", "NOT ENFORCED NORELY"),
+    ("NORELY NOT ENFORCED", "NOT ENFORCED NORELY"),
+    ("NORELY", "NOT ENFORCED NORELY"),
+    ("RELY", "NOT ENFORCED RELY")
   )
 
   test("Add foreign key constraint") {
@@ -104,7 +104,7 @@ class ForeignKeyConstraintSuite extends QueryTest with 
CommandSuiteBase with DDL
           parameters = Map("constraintName" -> "fk1",
             "oldConstraint" ->
               ("CONSTRAINT fk1 FOREIGN KEY (fk) " +
-                "REFERENCES test_catalog.ns.tbl_ref (id) NOT ENFORCED 
UNVALIDATED NORELY"))
+                "REFERENCES test_catalog.ns.tbl_ref (id) NOT ENFORCED NORELY"))
         )
       }
     }
@@ -124,7 +124,7 @@ class ForeignKeyConstraintSuite extends QueryTest with 
CommandSuiteBase with DDL
       assert(constraint.name() == "fk1")
       assert(constraint.toDDL ==
         s"CONSTRAINT fk1 FOREIGN KEY (fk1, fk2) " +
-          s"REFERENCES test_catalog.ns.tbl_ref (id1, id2) NOT ENFORCED 
UNVALIDATED NORELY")
+          s"REFERENCES test_catalog.ns.tbl_ref (id1, id2) NOT ENFORCED NORELY")
     }
   }
 }
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/PrimaryKeyConstraintSuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/PrimaryKeyConstraintSuite.scala
index a4785e953a2d..f692f9588161 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/PrimaryKeyConstraintSuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/PrimaryKeyConstraintSuite.scala
@@ -23,12 +23,12 @@ class PrimaryKeyConstraintSuite extends QueryTest with 
CommandSuiteBase with DDL
   override protected def command: String = "PRIMARY KEY CONSTRAINT"
 
   private val validConstraintCharacteristics = Seq(
-    ("", "NOT ENFORCED UNVALIDATED NORELY"),
-    ("NOT ENFORCED", "NOT ENFORCED UNVALIDATED NORELY"),
-    ("NOT ENFORCED NORELY", "NOT ENFORCED UNVALIDATED NORELY"),
-    ("NORELY NOT ENFORCED", "NOT ENFORCED UNVALIDATED NORELY"),
-    ("NORELY", "NOT ENFORCED UNVALIDATED NORELY"),
-    ("RELY", "NOT ENFORCED UNVALIDATED RELY")
+    ("", "NOT ENFORCED NORELY"),
+    ("NOT ENFORCED", "NOT ENFORCED NORELY"),
+    ("NOT ENFORCED NORELY", "NOT ENFORCED NORELY"),
+    ("NORELY NOT ENFORCED", "NOT ENFORCED NORELY"),
+    ("NORELY", "NOT ENFORCED NORELY"),
+    ("RELY", "NOT ENFORCED RELY")
   )
 
   test("Add primary key constraint") {
@@ -92,7 +92,7 @@ class PrimaryKeyConstraintSuite extends QueryTest with 
CommandSuiteBase with DDL
           condition = "CONSTRAINT_ALREADY_EXISTS",
           sqlState = "42710",
           parameters = Map("constraintName" -> "pk1",
-            "oldConstraint" -> "CONSTRAINT pk1 PRIMARY KEY (id) NOT ENFORCED 
UNVALIDATED NORELY")
+            "oldConstraint" -> "CONSTRAINT pk1 PRIMARY KEY (id) NOT ENFORCED 
NORELY")
         )
       }
     }
@@ -109,7 +109,7 @@ class PrimaryKeyConstraintSuite extends QueryTest with 
CommandSuiteBase with DDL
       val constraint = table.constraints.head
       assert(constraint.name() == "pk1")
       assert(constraint.toDDL ==
-        "CONSTRAINT pk1 PRIMARY KEY (id1, id2) NOT ENFORCED UNVALIDATED 
NORELY")
+        "CONSTRAINT pk1 PRIMARY KEY (id1, id2) NOT ENFORCED NORELY")
     }
   }
 }
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowCreateTableSuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowCreateTableSuite.scala
index f72127cbd1de..2e3929d906ce 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowCreateTableSuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/ShowCreateTableSuite.scala
@@ -181,4 +181,60 @@ class ShowCreateTableSuite extends 
command.ShowCreateTableSuiteBase with Command
       )
     }
   }
+
+  test("show table constraints") {
+    withNamespaceAndTable("ns", "tbl", nonPartitionCatalog) { t =>
+      withTable("other_table") {
+        sql(
+          s"""
+             |CREATE TABLE other_table (
+             |  id STRING PRIMARY KEY
+             |)
+             |USING parquet
+        """.stripMargin)
+        sql(
+          s"""
+             |CREATE TABLE $t (
+             |  a INT,
+             |  b STRING,
+             |  c STRING,
+             |  PRIMARY KEY (a),
+             |  CONSTRAINT uk_b UNIQUE (b),
+             |  CONSTRAINT fk_c FOREIGN KEY (c) REFERENCES other_table(id) 
RELY,
+             |  CONSTRAINT c1 CHECK (c IS NOT NULL),
+             |  CONSTRAINT c2 CHECK (a > 0)
+             |)
+             |$defaultUsing
+        """.stripMargin)
+        var showDDL = getShowCreateDDL(t)
+        val expectedDDLPrefix = Array(
+          s"CREATE TABLE $nonPartitionCatalog.ns.tbl (",
+          "a INT NOT NULL,",
+          "b STRING,",
+          "c STRING,",
+          "CONSTRAINT tbl_pk PRIMARY KEY (a) NOT ENFORCED NORELY,",
+          "CONSTRAINT uk_b UNIQUE (b) NOT ENFORCED NORELY,",
+          "CONSTRAINT fk_c FOREIGN KEY (c) REFERENCES other_table (id) NOT 
ENFORCED RELY,",
+          "CONSTRAINT c1 CHECK (c IS NOT NULL) ENFORCED NORELY,"
+        )
+        assert(showDDL === expectedDDLPrefix ++ Array(
+          "CONSTRAINT c2 CHECK (a > 0) ENFORCED NORELY)",
+          defaultUsing))
+
+        sql(s"ALTER TABLE $t ADD CONSTRAINT c3 CHECK (b IS NOT NULL) ENFORCED 
RELY")
+        showDDL = getShowCreateDDL(t)
+        val expectedDDLArrayWithNewConstraint = expectedDDLPrefix ++ Array(
+          "CONSTRAINT c2 CHECK (a > 0) ENFORCED NORELY,",
+          "CONSTRAINT c3 CHECK (b IS NOT NULL) ENFORCED RELY)",
+          defaultUsing
+        )
+        assert(showDDL === expectedDDLArrayWithNewConstraint)
+        sql(s"ALTER TABLE $t DROP CONSTRAINT c1")
+        showDDL = getShowCreateDDL(t)
+        val expectedDDLArrayAfterDrop = 
expectedDDLArrayWithNewConstraint.filterNot(
+          _.contains("c1 CHECK (c IS NOT NULL) ENFORCED NORELY"))
+        assert(showDDL === expectedDDLArrayAfterDrop)
+      }
+    }
+  }
 }
diff --git 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/UniqueConstraintSuite.scala
 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/UniqueConstraintSuite.scala
index 9446cbc6ade2..6efc3912af9d 100644
--- 
a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/UniqueConstraintSuite.scala
+++ 
b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/UniqueConstraintSuite.scala
@@ -23,12 +23,12 @@ class UniqueConstraintSuite extends QueryTest with 
CommandSuiteBase with DDLComm
   override protected def command: String = "UNIQUE CONSTRAINT"
 
   private val validConstraintCharacteristics = Seq(
-    ("", "NOT ENFORCED UNVALIDATED NORELY"),
-    ("NOT ENFORCED", "NOT ENFORCED UNVALIDATED NORELY"),
-    ("NOT ENFORCED NORELY", "NOT ENFORCED UNVALIDATED NORELY"),
-    ("NORELY NOT ENFORCED", "NOT ENFORCED UNVALIDATED NORELY"),
-    ("NORELY", "NOT ENFORCED UNVALIDATED NORELY"),
-    ("RELY", "NOT ENFORCED UNVALIDATED RELY")
+    ("", "NOT ENFORCED NORELY"),
+    ("NOT ENFORCED", "NOT ENFORCED NORELY"),
+    ("NOT ENFORCED NORELY", "NOT ENFORCED NORELY"),
+    ("NORELY NOT ENFORCED", "NOT ENFORCED NORELY"),
+    ("NORELY", "NOT ENFORCED NORELY"),
+    ("RELY", "NOT ENFORCED RELY")
   )
 
   test("Add unique constraint") {
@@ -92,7 +92,7 @@ class UniqueConstraintSuite extends QueryTest with 
CommandSuiteBase with DDLComm
           condition = "CONSTRAINT_ALREADY_EXISTS",
           sqlState = "42710",
           parameters = Map("constraintName" -> "uk1",
-            "oldConstraint" -> "CONSTRAINT uk1 UNIQUE (id) NOT ENFORCED 
UNVALIDATED NORELY")
+            "oldConstraint" -> "CONSTRAINT uk1 UNIQUE (id) NOT ENFORCED 
NORELY")
         )
       }
     }
@@ -109,7 +109,7 @@ class UniqueConstraintSuite extends QueryTest with 
CommandSuiteBase with DDLComm
       val constraint = table.constraints.head
       assert(constraint.name() == "uk1")
       assert(constraint.toDDL ==
-        "CONSTRAINT uk1 UNIQUE (id1, id2) NOT ENFORCED UNVALIDATED NORELY")
+        "CONSTRAINT uk1 UNIQUE (id1, id2) NOT ENFORCED NORELY")
     }
   }
 }


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org

Reply via email to