This is an automated email from the ASF dual-hosted git repository.

gurwls223 pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/spark.git


The following commit(s) were added to refs/heads/master by this push:
     new 3f8a8b36d26 [SPARK-40798][SQL][FOLLOW-UP] Fix ansi mode in v2 ALTER TABLE PARTITION
3f8a8b36d26 is described below

commit 3f8a8b36d264acdf688ea03033371b1171bfedf0
Author: ulysses-you <ulyssesyo...@gmail.com>
AuthorDate: Thu Nov 3 19:28:51 2022 +0900

    [SPARK-40798][SQL][FOLLOW-UP] Fix ansi mode in v2 ALTER TABLE PARTITION
    
    ### What changes were proposed in this pull request?
    
    Change the plain cast in `convertToPartIdent` to `PartitioningUtils.castPartitionSpec`, so that the behavior follows `STORE_ASSIGNMENT_POLICY`.
    
    ### Why are the changes needed?
    
    Make the v2 ALTER PARTITION code path follow `STORE_ASSIGNMENT_POLICY` in ANSI mode.
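    
    A rough sketch of the user-visible difference (the catalog and table name
    `testcat.ns.tbl` and the session setup below are assumed for illustration
    only and are not part of this change):
    
    ```scala
    import org.apache.spark.sql.internal.SQLConf
    
    // Assumes an existing SparkSession `spark` configured with a v2 catalog
    // `testcat` whose tables support partition management; `p` is an int
    // partition column.
    
    // Under the default store assignment policy (ANSI), a partition value that
    // cannot be cast to the partition column type is rejected:
    // spark.sql("ALTER TABLE testcat.ns.tbl ADD PARTITION (p='aaa')")  // error
    
    // With the legacy flag set, the old plain cast is kept, so 'aaa' silently
    // degrades to null and the partition falls into the default (null) partition:
    spark.conf.set(SQLConf.SKIP_TYPE_VALIDATION_ON_ALTER_PARTITION.key, "true")
    spark.sql("ALTER TABLE testcat.ns.tbl ADD PARTITION (p='aaa')")
    ```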
    
    ### Does this PR introduce _any_ user-facing change?
    
    no
    
    ### How was this patch tested?
    
    Removed the test
    `SPARK-40798: Alter partition should verify partition value - legacy`.
    
    This change is already covered by
    `SPARK-40798: Alter partition should verify partition value`.
    
    Closes #38449 from ulysses-you/SPARK-40798-follow.
    
    Authored-by: ulysses-you <ulyssesyo...@gmail.com>
    Signed-off-by: Hyukjin Kwon <gurwls...@apache.org>
---
 .../spark/sql/catalyst/analysis/ResolvePartitionSpec.scala       | 9 +++++++--
 .../sql/execution/command/v2/AlterTableAddPartitionSuite.scala   | 4 +---
 2 files changed, 8 insertions(+), 5 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolvePartitionSpec.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolvePartitionSpec.scala
index e09991cb2b9..90a502653d0 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolvePartitionSpec.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolvePartitionSpec.scala
@@ -25,8 +25,9 @@ import org.apache.spark.sql.catalyst.rules.Rule
 import org.apache.spark.sql.catalyst.trees.TreePattern.COMMAND
 import org.apache.spark.sql.catalyst.util.CharVarcharUtils
 import org.apache.spark.sql.connector.catalog.SupportsPartitionManagement
+import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types._
-import org.apache.spark.sql.util.PartitioningUtils.{normalizePartitionSpec, requireExactMatchedPartitionSpec}
+import org.apache.spark.sql.util.PartitioningUtils.{castPartitionSpec, normalizePartitionSpec, requireExactMatchedPartitionSpec}
 
 /**
  * Resolve [[UnresolvedPartitionSpec]] to [[ResolvedPartitionSpec]] in partition related commands.
@@ -78,7 +79,11 @@ object ResolvePartitionSpec extends Rule[LogicalPlan] {
     val partValues = schema.map { part =>
       val raw = partitionSpec.get(part.name).orNull
       val dt = CharVarcharUtils.replaceCharVarcharWithString(part.dataType)
-      Cast(Literal.create(raw, StringType), dt, Some(conf.sessionLocalTimeZone)).eval()
+      if (SQLConf.get.getConf(SQLConf.SKIP_TYPE_VALIDATION_ON_ALTER_PARTITION)) {
+        Cast(Literal.create(raw, StringType), dt, Some(conf.sessionLocalTimeZone)).eval()
+      } else {
+        castPartitionSpec(raw, dt, conf).eval()
+      }
     }
     InternalRow.fromSeq(partValues)
   }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTableAddPartitionSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTableAddPartitionSuite.scala
index c33d9b0101a..a9ab11e483f 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTableAddPartitionSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/v2/AlterTableAddPartitionSuite.scala
@@ -129,9 +129,7 @@ class AlterTableAddPartitionSuite
     withNamespaceAndTable("ns", "tbl") { t =>
       sql(s"CREATE TABLE $t (c int) $defaultUsing PARTITIONED BY (p int)")
 
-      withSQLConf(
-          SQLConf.SKIP_TYPE_VALIDATION_ON_ALTER_PARTITION.key -> "true",
-          SQLConf.ANSI_ENABLED.key -> "false") {
+      withSQLConf(SQLConf.SKIP_TYPE_VALIDATION_ON_ALTER_PARTITION.key -> "true") {
         sql(s"ALTER TABLE $t ADD PARTITION (p='aaa')")
         checkPartitions(t, Map("p" -> defaultPartitionName))
         sql(s"ALTER TABLE $t DROP PARTITION (p=null)")


---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscr...@spark.apache.org
For additional commands, e-mail: commits-h...@spark.apache.org
