Repository: spark
Updated Branches:
  refs/heads/branch-1.0-jdbc 56d674fe7 -> cf0d14b01


[SPARK-2287] [SQL] Make ScalaReflection able to handle generic case classes.

Author: Takuya UESHIN <ues...@happy-camper.st>

Closes #1226 from ueshin/issues/SPARK-2287 and squashes the following commits:

32ef7c3 [Takuya UESHIN] Add execution of `SHOW TABLES` before `TestHive.reset()`.
541dc8d [Takuya UESHIN] Merge branch 'master' into issues/SPARK-2287
fac5fae [Takuya UESHIN] Remove unnecessary method receiver.
d306e60 [Takuya UESHIN] Merge branch 'master' into issues/SPARK-2287
7de5706 [Takuya UESHIN] Make ScalaReflection able to handle generic case classes.

(cherry picked from commit bc7041a42dfa84312492ea8cae6fdeaeac4f6d1c)
Signed-off-by: Michael Armbrust <mich...@databricks.com>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/cf0d14b0
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/cf0d14b0
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/cf0d14b0

Branch: refs/heads/branch-1.0-jdbc
Commit: cf0d14b01d3869a0a4f6f3a4d00178c5dbb04784
Parents: 56d674f
Author: Takuya UESHIN <ues...@happy-camper.st>
Authored: Wed Jul 2 10:10:36 2014 -0700
Committer: Michael Armbrust <mich...@databricks.com>
Committed: Wed Jul 2 10:10:52 2014 -0700

----------------------------------------------------------------------
 .../spark/sql/catalyst/ScalaReflection.scala    |  7 +++++--
 .../sql/catalyst/ScalaReflectionSuite.scala     | 20 ++++++++++++++++++++
 2 files changed, 25 insertions(+), 2 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/cf0d14b0/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
index ada48ea..5a55be1 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/ScalaReflection.scala
@@ -47,10 +47,13 @@ object ScalaReflection {
       val TypeRef(_, _, Seq(optType)) = t
       Schema(schemaFor(optType).dataType, nullable = true)
     case t if t <:< typeOf[Product] =>
-      val params = t.member("<init>": TermName).asMethod.paramss
+      val formalTypeArgs = t.typeSymbol.asClass.typeParams
+      val TypeRef(_, _, actualTypeArgs) = t
+      val params = t.member(nme.CONSTRUCTOR).asMethod.paramss
       Schema(StructType(
         params.head.map { p =>
-          val Schema(dataType, nullable) = schemaFor(p.typeSignature)
+          val Schema(dataType, nullable) =
+            schemaFor(p.typeSignature.substituteTypes(formalTypeArgs, actualTypeArgs))
           StructField(p.name.toString, dataType, nullable)
         }), nullable = true)
     // Need to decide if we actually need a special type here.
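
For readers less familiar with the Scala reflection calls above, here is a minimal, self-contained sketch of the same substitution step, written against the Scala 2.10 reflection API that this patch uses (the object name and the println are illustrative only, not part of the patch):

import scala.reflect.runtime.universe._

case class GenericData[A](genericField: A)

object SubstituteTypesSketch {
  def main(args: Array[String]): Unit = {
    val t = typeOf[GenericData[Int]]
    // Formal type parameters declared on the class (here: A).
    val formalTypeArgs = t.typeSymbol.asClass.typeParams
    // Actual type arguments at this use site (here: Int).
    val TypeRef(_, _, actualTypeArgs) = t
    // Parameter lists of the primary constructor.
    val params = t.member(nme.CONSTRUCTOR).asMethod.paramss
    params.head.foreach { p =>
      // Without substitution the declared type is the abstract A; with it, Int.
      val resolved = p.typeSignature.substituteTypes(formalTypeArgs, actualTypeArgs)
      println(s"${p.name}: ${p.typeSignature} -> $resolved")
    }
  }
}

Running this should print something like "genericField: A -> Int", which is what lets schemaFor pick IntegerType for the StructField instead of failing on the unresolved type parameter A.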

http://git-wip-us.apache.org/repos/asf/spark/blob/cf0d14b0/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ScalaReflectionSuite.scala
----------------------------------------------------------------------
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ScalaReflectionSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ScalaReflectionSuite.scala
index 489d7e9..c0438db 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ScalaReflectionSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/ScalaReflectionSuite.scala
@@ -60,6 +60,9 @@ case class ComplexData(
     mapField: Map[Int, String],
     structField: PrimitiveData)
 
+case class GenericData[A](
+    genericField: A)
+
 class ScalaReflectionSuite extends FunSuite {
   import ScalaReflection._
 
@@ -128,4 +131,21 @@ class ScalaReflectionSuite extends FunSuite {
           nullable = true))),
       nullable = true))
   }
+
+  test("generic data") {
+    val schema = schemaFor[GenericData[Int]]
+    assert(schema === Schema(
+      StructType(Seq(
+        StructField("genericField", IntegerType, nullable = false))),
+      nullable = true))
+  }
+
+  test("tuple data") {
+    val schema = schemaFor[(Int, String)]
+    assert(schema === Schema(
+      StructType(Seq(
+        StructField("_1", IntegerType, nullable = false),
+        StructField("_2", StringType, nullable = true))),
+      nullable = true))
+  }
 }
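
End to end, the change means an RDD of a generic case class can be turned into a table like any other case class. A rough usage sketch follows, assuming the Spark 1.0-era SQL API (the createSchemaRDD implicit and registerAsTable); the object name and table name are made up for illustration:

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.SQLContext

// Same shape as the GenericData case class added in the test above.
case class GenericData[A](genericField: A)

object GenericCaseClassExample {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setAppName("generic-case-class").setMaster("local"))
    val sqlContext = new SQLContext(sc)
    import sqlContext.createSchemaRDD  // implicit RDD[A <: Product] => SchemaRDD

    // Schema inference via ScalaReflection now resolves the field type A to Int.
    val data = sc.parallelize(Seq(GenericData(1), GenericData(2), GenericData(3)))
    data.registerAsTable("generic")
    sqlContext.sql("SELECT genericField FROM generic").collect().foreach(println)

    sc.stop()
  }
}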
