Hey all,
We ran into some test failures in our internal branch (which builds
against Hive 1.1), and I narrowed them down to the fix below. I'm not
super familiar with the Hive integration code, but does this look like
a bug that affects other versions of Hive too?
Without this change we hit errors because some internal Hive
configuration that is initialized by the session state was not available.
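For context, my (possibly wrong) reading of the Hive code is that the
active SessionState lives in a thread local, so a session started on the
thread that created the HiveContext isn't visible from other threads. A
rough sketch of what the guard in the patch below protects against,
assuming hive-exec is on the classpath:

import org.apache.hadoop.hive.conf.HiveConf
import org.apache.hadoop.hive.ql.session.SessionState

object SessionStateSketch {
  def main(args: Array[String]): Unit = {
    // SessionState.start() registers the state in Hive's thread local
    // and runs the per-session initialization (scratch dirs, session id,
    // etc.) that some config values depend on.
    val state = SessionState.start(new SessionState(new HiveConf(classOf[SessionState])))
    assert(SessionState.get() eq state)

    // A different thread doesn't see that state, so code that reads
    // SessionState.get() there finds nothing initialized.
    val t = new Thread(new Runnable {
      override def run(): Unit =
        println(s"SessionState on another thread: ${SessionState.get()}")
    })
    t.start()
    t.join()
  }
}

If that reading is right, the check below just re-registers the context's
session state on whatever thread ends up running the query.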
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
index dd06b26..6242745 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
@@ -93,6 +93,10 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) {
     if (conf.dialect == "sql") {
       super.sql(substituted)
     } else if (conf.dialect == "hiveql") {
+      // Make sure Hive session state is initialized.
+      if (SessionState.get() != sessionState) {
+        SessionState.start(sessionState)
+      }
       val ddlPlan = ddlParserWithHiveQL.parse(sqlText, exceptionOnError = false)
       DataFrame(this, ddlPlan.getOrElse(HiveQl.parseSql(substituted)))
     } else {
--
Marcelo