sunchao commented on a change in pull request #30491:
URL: https://github.com/apache/spark/pull/30491#discussion_r532346895
##########
File path: sql/core/src/main/scala/org/apache/spark/sql/execution/datasources/v2/RefreshTableExec.scala
##########
@@ -17,23 +17,20 @@
package org.apache.spark.sql.execution.datasources.v2
-import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.InternalRow
import org.apache.spark.sql.catalyst.expressions.Attribute
-import org.apache.spark.sql.connector.catalog.{Identifier, Table, TableCatalog}
+import org.apache.spark.sql.connector.catalog.{Identifier, TableCatalog}
case class RefreshTableExec(
- session: SparkSession,
catalog: TableCatalog,
- table: Table,
- ident: Identifier) extends V2CommandExec {
+ ident: Identifier,
+ invalidateCache: () => Unit) extends V2CommandExec {
override protected def run(): Seq[InternalRow] = {
catalog.invalidateTable(ident)
// invalidate all caches referencing the given table
// TODO(SPARK-33437): re-cache the table itself once we support caching a DSv2 table
Review comment:
Sure. Will do that soon.
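For context, here is a minimal sketch of how the new `invalidateCache` closure might be wired in from the planning side. The helper name `RefreshTablePlanningSketch`, the `v2Relation` parameter, and the direct call to `CacheManager.uncacheQuery` are assumptions for illustration only; the actual wiring in this PR lives in the DSv2 planning code and may differ.
```scala
package org.apache.spark.sql.execution.datasources.v2

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.connector.catalog.{Identifier, TableCatalog}
import org.apache.spark.sql.execution.SparkPlan

// Illustrative sketch only, not the PR's actual planner code: shows how a caller
// could build RefreshTableExec now that cache invalidation is passed in as a
// closure instead of a SparkSession/Table pair.
object RefreshTablePlanningSketch {
  def plan(
      session: SparkSession,
      catalog: TableCatalog,
      ident: Identifier,
      v2Relation: LogicalPlan): SparkPlan = {
    // The closure captures the session and the table's relation, so the exec
    // node itself stays free of SparkSession/Table dependencies.
    val invalidateCache = () =>
      session.sharedState.cacheManager.uncacheQuery(session, v2Relation, cascade = true)
    RefreshTableExec(catalog, ident, invalidateCache)
  }
}
```
Passing a plain `() => Unit` keeps the physical node decoupled from the session, which should also make it easier to exercise in unit tests.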