spark git commit: Revert "[SPARK-4309][SPARK-4407][SQL] Date type support for Thrift server, and fixes for complex types"

2014-11-16 Thread marmbrus
Repository: spark
Updated Branches:
  refs/heads/master cb6bd83a9 -> 45ce3273c


Revert "[SPARK-4309][SPARK-4407][SQL] Date type support for Thrift server, and fixes for complex types"

Author: Michael Armbrust <mich...@databricks.com>

Closes #3292 from marmbrus/revert4309 and squashes the following commits:

808e96e [Michael Armbrust] Revert "[SPARK-4309][SPARK-4407][SQL] Date type support for Thrift server, and fixes for complex types"


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/45ce3273
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/45ce3273
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/45ce3273

Branch: refs/heads/master
Commit: 45ce3273cb618d14ec4d20c4c95699634b951086
Parents: cb6bd83
Author: Michael Armbrust <mich...@databricks.com>
Authored: Sun Nov 16 15:05:04 2014 -0800
Committer: Michael Armbrust <mich...@databricks.com>
Committed: Sun Nov 16 15:05:08 2014 -0800

--
 .../thriftserver/HiveThriftServer2Suite.scala   |  90 -
 .../spark/sql/hive/thriftserver/Shim12.scala|  11 +-
 .../spark/sql/hive/thriftserver/Shim13.scala|  29 +++--
 .../org/apache/spark/sql/hive/HiveContext.scala | 127 ++-
 4 files changed, 115 insertions(+), 142 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/45ce3273/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suite.scala
--
diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suite.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suite.scala
index 23d12cb..bba29b2 100644
--- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suite.scala
+++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suite.scala
@@ -19,10 +19,9 @@ package org.apache.spark.sql.hive.thriftserver
 
 import java.io.File
 import java.net.ServerSocket
-import java.sql.{Date, DriverManager, Statement}
+import java.sql.{DriverManager, Statement}
 import java.util.concurrent.TimeoutException
 
-import scala.collection.JavaConversions._
 import scala.collection.mutable.ArrayBuffer
 import scala.concurrent.duration._
 import scala.concurrent.{Await, Promise}
@@ -52,15 +51,6 @@ import org.apache.spark.sql.hive.HiveShim
 class HiveThriftServer2Suite extends FunSuite with Logging {
   Class.forName(classOf[HiveDriver].getCanonicalName)
 
-  object TestData {
-    def getTestDataFilePath(name: String) = {
-      Thread.currentThread().getContextClassLoader.getResource(s"data/files/$name")
-    }
-
-    val smallKv = getTestDataFilePath("small_kv.txt")
-    val smallKvWithNull = getTestDataFilePath("small_kv_with_null.txt")
-  }
-
   def randomListeningPort =  {
     // Let the system to choose a random available port to avoid collision with other parallel
     // builds.
@@ -155,8 +145,12 @@ class HiveThriftServer2Suite extends FunSuite with Logging {
   }
 }
 
-    // Resets SPARK_TESTING to avoid loading Log4J configurations in testing class paths
-    val env = Seq("SPARK_TESTING" -> "0")
+    val env = Seq(
+      // Resets SPARK_TESTING to avoid loading Log4J configurations in testing class paths
+      "SPARK_TESTING" -> "0",
+      // Prevents loading classes out of the assembly jar. Otherwise Utils.sparkVersion can't read
+      // proper version information from the jar manifest.
+      "SPARK_PREPEND_CLASSES" -> "")
 
     Process(command, None, env: _*).run(ProcessLogger(
       captureThriftServerOutput("stdout"),
@@ -200,12 +194,15 @@ class HiveThriftServer2Suite extends FunSuite with Logging {
 
   test("Test JDBC query execution") {
     withJdbcStatement() { statement =>
-      val queries = Seq(
-        "SET spark.sql.shuffle.partitions=3",
-        "DROP TABLE IF EXISTS test",
-        "CREATE TABLE test(key INT, val STRING)",
-        s"LOAD DATA LOCAL INPATH '${TestData.smallKv}' OVERWRITE INTO TABLE test",
-        "CACHE TABLE test")
+      val dataFilePath =
+        Thread.currentThread().getContextClassLoader.getResource("data/files/small_kv.txt")
+
+      val queries =
+        s"""SET spark.sql.shuffle.partitions=3;
+           |CREATE TABLE test(key INT, val STRING);
+           |LOAD DATA LOCAL INPATH '$dataFilePath' OVERWRITE INTO TABLE test;
+           |CACHE TABLE test;
+         """.stripMargin.split(";").map(_.trim).filter(_.nonEmpty)
 
       queries.foreach(statement.execute)
 
@@ -219,10 +216,14 @@ class HiveThriftServer2Suite extends FunSuite with Logging {
 
   test("SPARK-3004 regression: result set containing NULL") {
     withJdbcStatement() { statement =>
+      val dataFilePath =
+

spark git commit: Revert "[SPARK-4309][SPARK-4407][SQL] Date type support for Thrift server, and fixes for complex types"

2014-11-16 Thread marmbrus
Repository: spark
Updated Branches:
  refs/heads/branch-1.2 8b83a34fa -> 70d037168


Revert "[SPARK-4309][SPARK-4407][SQL] Date type support for Thrift server, and fixes for complex types"

Author: Michael Armbrust <mich...@databricks.com>

Closes #3292 from marmbrus/revert4309 and squashes the following commits:

808e96e [Michael Armbrust] Revert "[SPARK-4309][SPARK-4407][SQL] Date type support for Thrift server, and fixes for complex types"

(cherry picked from commit 45ce3273cb618d14ec4d20c4c95699634b951086)
Signed-off-by: Michael Armbrust <mich...@databricks.com>


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/70d03716
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/70d03716
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/70d03716

Branch: refs/heads/branch-1.2
Commit: 70d0371683a56059a7b4c4ebdab6e2fe055b9a76
Parents: 8b83a34
Author: Michael Armbrust <mich...@databricks.com>
Authored: Sun Nov 16 15:05:04 2014 -0800
Committer: Michael Armbrust <mich...@databricks.com>
Committed: Sun Nov 16 15:05:30 2014 -0800

--
 .../thriftserver/HiveThriftServer2Suite.scala   |  90 -
 .../spark/sql/hive/thriftserver/Shim12.scala|  11 +-
 .../spark/sql/hive/thriftserver/Shim13.scala|  29 +++--
 .../org/apache/spark/sql/hive/HiveContext.scala | 127 ++-
 4 files changed, 115 insertions(+), 142 deletions(-)
--


http://git-wip-us.apache.org/repos/asf/spark/blob/70d03716/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suite.scala
--
diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suite.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suite.scala
index 23d12cb..bba29b2 100644
--- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suite.scala
+++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suite.scala
@@ -19,10 +19,9 @@ package org.apache.spark.sql.hive.thriftserver
 
 import java.io.File
 import java.net.ServerSocket
-import java.sql.{Date, DriverManager, Statement}
+import java.sql.{DriverManager, Statement}
 import java.util.concurrent.TimeoutException
 
-import scala.collection.JavaConversions._
 import scala.collection.mutable.ArrayBuffer
 import scala.concurrent.duration._
 import scala.concurrent.{Await, Promise}
@@ -52,15 +51,6 @@ import org.apache.spark.sql.hive.HiveShim
 class HiveThriftServer2Suite extends FunSuite with Logging {
   Class.forName(classOf[HiveDriver].getCanonicalName)
 
-  object TestData {
-    def getTestDataFilePath(name: String) = {
-      Thread.currentThread().getContextClassLoader.getResource(s"data/files/$name")
-    }
-
-    val smallKv = getTestDataFilePath("small_kv.txt")
-    val smallKvWithNull = getTestDataFilePath("small_kv_with_null.txt")
-  }
-
   def randomListeningPort =  {
     // Let the system to choose a random available port to avoid collision with other parallel
     // builds.
@@ -155,8 +145,12 @@ class HiveThriftServer2Suite extends FunSuite with Logging {
   }
 }
 
-    // Resets SPARK_TESTING to avoid loading Log4J configurations in testing class paths
-    val env = Seq("SPARK_TESTING" -> "0")
+    val env = Seq(
+      // Resets SPARK_TESTING to avoid loading Log4J configurations in testing class paths
+      "SPARK_TESTING" -> "0",
+      // Prevents loading classes out of the assembly jar. Otherwise Utils.sparkVersion can't read
+      // proper version information from the jar manifest.
+      "SPARK_PREPEND_CLASSES" -> "")
 
     Process(command, None, env: _*).run(ProcessLogger(
       captureThriftServerOutput("stdout"),
@@ -200,12 +194,15 @@ class HiveThriftServer2Suite extends FunSuite with Logging {
 
   test("Test JDBC query execution") {
     withJdbcStatement() { statement =>
-      val queries = Seq(
-        "SET spark.sql.shuffle.partitions=3",
-        "DROP TABLE IF EXISTS test",
-        "CREATE TABLE test(key INT, val STRING)",
-        s"LOAD DATA LOCAL INPATH '${TestData.smallKv}' OVERWRITE INTO TABLE test",
-        "CACHE TABLE test")
+      val dataFilePath =
+        Thread.currentThread().getContextClassLoader.getResource("data/files/small_kv.txt")
+
+      val queries =
+        s"""SET spark.sql.shuffle.partitions=3;
+           |CREATE TABLE test(key INT, val STRING);
+           |LOAD DATA LOCAL INPATH '$dataFilePath' OVERWRITE INTO TABLE test;
+           |CACHE TABLE test;
+         """.stripMargin.split(";").map(_.trim).filter(_.nonEmpty)
 
       queries.foreach(statement.execute)
 
@@ -219,10 +216,14 @@ class HiveThriftServer2Suite extends FunSuite with Logging {
 
   test("SPARK-3004 regression: