cloud-fan commented on a change in pull request #25115: [SPARK-28351][SQL] Support DELETE in DataSource V2
URL: https://github.com/apache/spark/pull/25115#discussion_r308684748
 
 

 ##########
 File path: sql/core/src/test/scala/org/apache/spark/sql/sources/v2/TestInMemoryTableCatalog.scala
 ##########
 @@ -252,6 +255,45 @@ class InMemoryTable(
       withData(messages.map(_.asInstanceOf[BufferedRows]))
     }
   }
+
+  override def deleteWhere(filters: Array[Filter]): Unit = dataMap.synchronized {
+    val filtered = data.map {
+      rows =>
+        val newRows = filter(rows.rows, filters)
+        val newBufferedRows = new BufferedRows()
+        newBufferedRows.rows.appendAll(newRows)
+        newBufferedRows
+    }.filter(_.rows.nonEmpty)
+    dataMap.clear()
+    withData(filtered)
+  }
+
+  def filter(rows: mutable.ArrayBuffer[InternalRow],
+      filters: Array[Filter]): Array[InternalRow] = {
+    if (rows.isEmpty) {
+      rows.toArray
+    }
+    val filterStr =
+      filters.map {
+        filter => filter.sql
 
 Review comment:
   +1. Ideally the real implementation should build its own filter evaluator, instead of using Spark's `Expression`. See `ParquetFilters` as an example.
   
   We don't need a complete implementation in the test. The idea of only supporting equality filters on partition keys sounds pretty good.
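   
   For reference, a minimal sketch of what such a test-only evaluator could look like, handling just equality filters. The `matches` helper and its signature are hypothetical, not part of this PR; it assumes the table's `schema` is in scope and uses `CatalystTypeConverters` so that external values (e.g. `String`) compare correctly against internal representations (e.g. `UTF8String`):
   
   ```scala
   import org.apache.spark.sql.catalyst.{CatalystTypeConverters, InternalRow}
   import org.apache.spark.sql.sources.{EqualTo, Filter}
   import org.apache.spark.sql.types.StructType
   
   // Hypothetical test-only evaluator: returns true iff the row satisfies
   // every pushed-down filter. Only EqualTo is supported; anything else
   // fails loudly instead of being silently dropped.
   private def matches(
       schema: StructType,
       row: InternalRow,
       filters: Array[Filter]): Boolean = {
     filters.forall {
       case EqualTo(attribute, value) =>
         val ordinal = schema.fieldIndex(attribute)
         val dataType = schema(ordinal).dataType
         // Convert the external value (e.g. String) to its Catalyst
         // representation (e.g. UTF8String) before comparing.
         row.get(ordinal, dataType) == CatalystTypeConverters.convertToCatalyst(value)
       case unsupported =>
         throw new IllegalArgumentException(s"Unsupported filter in test: $unsupported")
     }
   }
   ```
   
   `deleteWhere` would then retain only the rows for which `matches` returns false, and the test could additionally require `attribute` to be a partition key to stay within the scope suggested above.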
