This is an automated email from the ASF dual-hosted git repository.

lzljs3620320 pushed a commit to branch release-1.0
in repository https://gitbox.apache.org/repos/asf/paimon.git
commit 6398a334fd561364e7e4cadf4cff101859300c97
Author: xuzifu666 <[email protected]>
AuthorDate: Wed Jan 1 22:32:20 2025 +0800

    [spark] Purge file need refresh table avoid FileNotFound (#4809)
---
 .../main/java/org/apache/paimon/spark/procedure/PurgeFilesProcedure.java | 1 +
 .../org/apache/paimon/spark/procedure/PurgeFilesProcedureTest.scala      | 1 -
 2 files changed, 1 insertion(+), 1 deletion(-)

diff --git a/paimon-spark/paimon-spark-common/src/main/java/org/apache/paimon/spark/procedure/PurgeFilesProcedure.java b/paimon-spark/paimon-spark-common/src/main/java/org/apache/paimon/spark/procedure/PurgeFilesProcedure.java
index 8a7aec6e14..9db724294a 100644
--- a/paimon-spark/paimon-spark-common/src/main/java/org/apache/paimon/spark/procedure/PurgeFilesProcedure.java
+++ b/paimon-spark/paimon-spark-common/src/main/java/org/apache/paimon/spark/procedure/PurgeFilesProcedure.java
@@ -82,6 +82,7 @@ public class PurgeFilesProcedure extends BaseProcedure {
                                 throw new RuntimeException(e);
                             }
                         });
+            spark().catalog().refreshTable(table.fullName());
         } catch (IOException e) {
             throw new RuntimeException(e);
         }

diff --git a/paimon-spark/paimon-spark-ut/src/test/scala/org/apache/paimon/spark/procedure/PurgeFilesProcedureTest.scala b/paimon-spark/paimon-spark-ut/src/test/scala/org/apache/paimon/spark/procedure/PurgeFilesProcedureTest.scala
index 27eafe1c3d..b8911ada2f 100644
--- a/paimon-spark/paimon-spark-ut/src/test/scala/org/apache/paimon/spark/procedure/PurgeFilesProcedureTest.scala
+++ b/paimon-spark/paimon-spark-ut/src/test/scala/org/apache/paimon/spark/procedure/PurgeFilesProcedureTest.scala
@@ -36,7 +36,6 @@ class PurgeFilesProcedureTest extends PaimonSparkTestBase {
     spark.sql("CALL paimon.sys.purge_files(table => 'test.T')")
     checkAnswer(spark.sql("select * from test.T"), Nil)
 
-    spark.sql("refresh table test.T");
     spark.sql("insert into T select '2', 'aa'");
     checkAnswer(spark.sql("select * from test.T"), Row("2", "aa") :: Nil)
   }
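For context, a minimal usage sketch of the behaviour this commit changes. The procedure call and table name mirror PurgeFilesProcedureTest; the CREATE TABLE schema and the Paimon catalog setup are assumptions, not taken from the patch. After purge_files deletes a table's data files, Spark's cached table metadata could still reference the removed files and trigger a FileNotFoundException on the next read or write. With this change the procedure calls spark().catalog().refreshTable(...) itself, so callers no longer need a manual REFRESH TABLE:

    // Scala sketch, assuming the Paimon Spark catalog is the current catalog
    spark.sql("CREATE TABLE test.T (id STRING, name STRING)")   // assumed schema
    spark.sql("INSERT INTO test.T SELECT '1', 'a'")

    // Purges the table's data files; before this commit, Spark's cached
    // file listing could still point at the deleted files.
    spark.sql("CALL paimon.sys.purge_files(table => 'test.T')")

    // Now works without an explicit "REFRESH TABLE test.T", because the
    // procedure refreshes the catalog cache itself.
    spark.sql("INSERT INTO test.T SELECT '2', 'aa'")
    spark.sql("SELECT * FROM test.T").show()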
