Repository: spark
Updated Branches:
  refs/heads/master e6a02c66d -> dab246f7e


[SPARK-13098] [SQL] remove GenericInternalRowWithSchema

This class is only used for serialization of Python DataFrame. However, we
don't require an internal row there, so `GenericRowWithSchema` can also do the job.

Author: Wenchen Fan <[email protected]>

Closes #10992 from cloud-fan/python.


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/dab246f7
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/dab246f7
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/dab246f7

Branch: refs/heads/master
Commit: dab246f7e4664d36073ec49d9df8a11c5e998cdb
Parents: e6a02c6
Author: Wenchen Fan <[email protected]>
Authored: Fri Jan 29 23:37:51 2016 -0800
Committer: Davies Liu <[email protected]>
Committed: Fri Jan 29 23:37:51 2016 -0800

----------------------------------------------------------------------
 .../apache/spark/sql/catalyst/expressions/rows.scala   | 12 ------------
 .../scala/org/apache/spark/sql/execution/python.scala  | 13 +++++--------
 2 files changed, 5 insertions(+), 20 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/dab246f7/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/rows.scala
----------------------------------------------------------------------
diff --git 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/rows.scala
 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/rows.scala
index 387d979..be6b253 100644
--- 
a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/rows.scala
+++ 
b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/expressions/rows.scala
@@ -233,18 +233,6 @@ class GenericInternalRow(private[sql] val values: 
Array[Any]) extends BaseGeneri
   override def copy(): GenericInternalRow = this
 }
 
-/**
- * This is used for serialization of Python DataFrame
- */
-class GenericInternalRowWithSchema(values: Array[Any], val schema: StructType)
-  extends GenericInternalRow(values) {
-
-  /** No-arg constructor for serialization. */
-  protected def this() = this(null, null)
-
-  def fieldIndex(name: String): Int = schema.fieldIndex(name)
-}
-
 class GenericMutableRow(values: Array[Any]) extends MutableRow with 
BaseGenericInternalRow {
   /** No-arg constructor for serialization. */
   protected def this() = this(null)

http://git-wip-us.apache.org/repos/asf/spark/blob/dab246f7/sql/core/src/main/scala/org/apache/spark/sql/execution/python.scala
----------------------------------------------------------------------
diff --git 
a/sql/core/src/main/scala/org/apache/spark/sql/execution/python.scala 
b/sql/core/src/main/scala/org/apache/spark/sql/execution/python.scala
index e3a016e..bf62bb0 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/python.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/python.scala
@@ -143,7 +143,7 @@ object EvaluatePython {
         values(i) = toJava(row.get(i, struct.fields(i).dataType), 
struct.fields(i).dataType)
         i += 1
       }
-      new GenericInternalRowWithSchema(values, struct)
+      new GenericRowWithSchema(values, struct)
 
     case (a: ArrayData, array: ArrayType) =>
       val values = new java.util.ArrayList[Any](a.numElements())
@@ -199,10 +199,7 @@ object EvaluatePython {
 
     case (c: Long, TimestampType) => c
 
-    case (c: String, StringType) => UTF8String.fromString(c)
-    case (c, StringType) =>
-      // If we get here, c is not a string. Call toString on it.
-      UTF8String.fromString(c.toString)
+    case (c, StringType) => UTF8String.fromString(c.toString)
 
     case (c: String, BinaryType) => c.getBytes("utf-8")
     case (c, BinaryType) if c.getClass.isArray && 
c.getClass.getComponentType.getName == "byte" => c
@@ -263,11 +260,11 @@ object EvaluatePython {
   }
 
   /**
-   * Pickler for InternalRow
+   * Pickler for external row.
    */
   private class RowPickler extends IObjectPickler {
 
-    private val cls = classOf[GenericInternalRowWithSchema]
+    private val cls = classOf[GenericRowWithSchema]
 
     // register this to Pickler and Unpickler
     def register(): Unit = {
@@ -282,7 +279,7 @@ object EvaluatePython {
       } else {
         // it will be memorized by Pickler to save some bytes
         pickler.save(this)
-        val row = obj.asInstanceOf[GenericInternalRowWithSchema]
+        val row = obj.asInstanceOf[GenericRowWithSchema]
         // schema should always be same object for memoization
         pickler.save(row.schema)
         out.write(Opcodes.TUPLE1)


---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to