cloud-fan commented on a change in pull request #26809: [SPARK-30185][SQL] Implement Dataset.tail API
URL: https://github.com/apache/spark/pull/26809#discussion_r356027736
 
 

 ##########
 File path: sql/core/src/main/scala/org/apache/spark/sql/execution/SparkPlan.scala
 ##########
 @@ -426,23 +456,46 @@ abstract class SparkPlan extends QueryPlan[SparkPlan] with Logging with Serializ
         }
       }
 
-      val p = partsScanned.until(math.min(partsScanned + numPartsToTry, totalParts).toInt)
+      val parts = partsScanned.until(math.min(partsScanned + numPartsToTry, totalParts).toInt)
+      val partsToScan = if (reverse) {
+        // Reverse partitions to scan. So, if parts was [1, 2, 3] in 200 partitions (0 to 199),
+        // it becomes [198, 197, 196].
+        parts.map(p => (totalParts - 1) - p)
+      } else {
+        parts
+      }
       val sc = sqlContext.sparkContext
       val res = sc.runJob(childRDD, (it: Iterator[(Long, Array[Byte])]) =>
-        if (it.hasNext) it.next() else (0L, Array.empty[Byte]), p)
+        if (it.hasNext) it.next() else (0L, Array.empty[Byte]), partsToScan)
 
       var i = 0
-      while (buf.length < n && i < res.length) {
-        val rows = decodeUnsafeRows(res(i)._2)
-        val rowsToTake = if (n - buf.length >= res(i)._1) {
-          rows.toArray
-        } else {
-          rows.take(n - buf.length).toArray
+
+      if (reverse) {
+        while (buf.length < n && i < res.length) {
+          val rows = decodeUnsafeRows(res(i)._2)
+          if (n - buf.length >= res(i)._1) {
+            buf.insertAll(0, rows.toArray[InternalRow])
 
 Review comment:
   is `ArrayBuffer` good at prepending?
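
   For context, a minimal standalone sketch (not from the PR; the object name,
   chunk sizes, and timings are illustrative assumptions) of why the question
   matters: `ArrayBuffer` is array-backed, so `insertAll(0, ...)` shifts every
   element already in the buffer, and prepending one chunk per partition makes
   the loop quadratic overall. One alternative, assuming each partition's rows
   can be appended in reversed order, is to append everything and reverse once
   at the end:

   ```scala
   import scala.collection.mutable.ArrayBuffer

   // Hypothetical micro-benchmark (not part of the PR): repeated prepends,
   // as buf.insertAll(0, ...) does per partition, versus appending reversed
   // chunks and reversing once at the end.
   object PrependVsAppend {
     def main(args: Array[String]): Unit = {
       val chunks = 2000                         // stand-in for partitions
       val chunk = Array.tabulate(64)(identity)  // stand-in for decoded rows

       // insertAll(0, ...) shifts the whole buffer on every call, so total
       // work grows quadratically with the number of chunks.
       var t0 = System.nanoTime()
       val byPrepend = ArrayBuffer.empty[Int]
       (0 until chunks).foreach(_ => byPrepend.insertAll(0, chunk))
       val prependMs = (System.nanoTime() - t0) / 1e6

       // Appends are amortized O(1) per element; a single reverse at the
       // end restores the desired order in one O(total) pass.
       t0 = System.nanoTime()
       val byAppend = ArrayBuffer.empty[Int]
       (0 until chunks).foreach(_ => byAppend ++= chunk.reverse)
       val appendThenReverse = byAppend.reverse
       val appendMs = (System.nanoTime() - t0) / 1e6

       assert(byPrepend == appendThenReverse)    // same final contents
       println(f"prepend: $prependMs%.1f ms, append+reverse: $appendMs%.1f ms")
     }
   }
   ```

   In the PR's loop that would mean appending each partition's rows (reversed)
   to `buf` and reversing `buf` once after the loop, at the cost of one extra
   pass over the result.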

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
[email protected]


With regards,
Apache Git Services

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]
