I am not creating any extra SQLContext explicitly, so I am not sure why the %sql interpreter is not finding the registered table.

 // Zeppelin notebook paragraph: load a CSV of statistics into a DataFrame
 // and register it as temp table "test" for the %sql interpreter.
 //import org.apache.spark.sql.SQLContext
 import org.apache.spark.sql._
 //import sqlContext.implicits._

 // NOTE(review): creating a second SQLContext is (correctly) commented out —
 // registering a temp table on a context other than the one the %sql
 // interpreter is bound to is the usual cause of "no such table" in Zeppelin;
 // confirm only the interpreter-provided sqlContext is in scope.
 //val sqlContext = new SQLContext(sc)

 // Row type for the CSV records; every field is kept as String (no parsing).
 case class CEStats(statstype: String, bootstrap: String, threshold : String, 
totalTypeCount : String, ratio : String, testCount : String)

 // Read the file, split each line on ",", map to CEStats, convert to a
 // DataFrame and cache it. Assumes each line has at least 6 comma-separated
 // fields — a shorter line would throw ArrayIndexOutOfBoundsException
 // (TODO confirm the input format).
 val rawData = 
sc.textFile("/user/cloudera/zepplin/mergeStatisticsData.txt").map(_.split(",")).map(p
 => CEStats(p(0), p(1), p(2), p(3), p(4), p(5))).toDF().cache()

 rawData.printSchema()
 // Register the DataFrame so SQL queries can reference it as "test".
 rawData.registerTempTable("test")

 // This works fine.
 sqlContext.sql("SELECT COUNT(DISTINCT bootstrap) FROM test ").show()

 // NOTE(review): this paragraph is a byte-for-byte duplicate of the snippet
 // above — it was apparently pasted multiple times into the email.
 //import org.apache.spark.sql.SQLContext
 import org.apache.spark.sql._
 //import sqlContext.implicits._

 // A fresh SQLContext is commented out; the interpreter-provided one is used.
 //val sqlContext = new SQLContext(sc)

 // Row type for the CSV records; every field is kept as String (no parsing).
 case class CEStats(statstype: String, bootstrap: String, threshold : String, 
totalTypeCount : String, ratio : String, testCount : String)

 // Read, split on ",", map to CEStats, convert to a cached DataFrame.
 // Assumes at least 6 fields per line (TODO confirm the input format).
 val rawData = 
sc.textFile("/user/cloudera/zepplin/mergeStatisticsData.txt").map(_.split(",")).map(p
 => CEStats(p(0), p(1), p(2), p(3), p(4), p(5))).toDF().cache()

 rawData.printSchema()
 // Register the DataFrame under the name "test" for SQL access.
 rawData.registerTempTable("test")

 // This works fine.
 sqlContext.sql("SELECT COUNT(DISTINCT bootstrap) FROM test ").show()

 // NOTE(review): third identical paste of the same paragraph (see above).
 //import org.apache.spark.sql.SQLContext
 import org.apache.spark.sql._
 //import sqlContext.implicits._

 // A fresh SQLContext is commented out; the interpreter-provided one is used.
 //val sqlContext = new SQLContext(sc)

 // Row type for the CSV records; every field is kept as String (no parsing).
 case class CEStats(statstype: String, bootstrap: String, threshold : String, 
totalTypeCount : String, ratio : String, testCount : String)

 // Read, split on ",", map to CEStats, convert to a cached DataFrame.
 // Assumes at least 6 fields per line (TODO confirm the input format).
 val rawData = 
sc.textFile("/user/cloudera/zepplin/mergeStatisticsData.txt").map(_.split(",")).map(p
 => CEStats(p(0), p(1), p(2), p(3), p(4), p(5))).toDF().cache()

 rawData.printSchema()
 // Register the DataFrame under the name "test" for SQL access.
 rawData.registerTempTable("test")

 // This works fine.
 sqlContext.sql("SELECT COUNT(DISTINCT bootstrap) FROM test ").show()

%sql select count(distinct bootstrap) from test

org.apache.spark.sql.AnalysisException: no such table test; line 1 pos 38 at 
org.apache.spark.sql.catalyst.analysis.package$AnalysisErrorAt.failAnalysis(package.scala:42)
 at 
org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$.getTable(Analyzer.scala:260)
 at 
org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$$anonfun$apply$7.applyOrElse(Analyzer.scala:268)
 at 
org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$$anonfun$apply$7.applyOrElse(Analyzer.scala:264)
 at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan$$anonfun$resolveOperators$1.apply(LogicalPlan.scala:57)
 at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan$$anonfun$resolveOperators$1.apply(LogicalPlan.scala:57)
 at 
org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:51)
 at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.resolveOperators(LogicalPlan.scala:56)
 at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan$$anonfun$1.apply(LogicalPlan.scala:54)
 at 
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan$$anonfun$1.apply(LogicalPlan.scala:54)
________________________________
From: Srivastava, Rachana (IP&Science)
Sent: Friday, April 29, 2016 6:05 AM
To: users@zeppelin.incubator.apache.org
Subject: Zeppelin Cannot Find Registered Table

I have a three-line program where I register a table and then run a SELECT * 
query. I suspect this is an issue with the SQLContext, but I have not created 
any SQLContext explicitly.

Following code works fine. I see my table registered:

// Row type for the combined statistics file; all 8 fields kept as String.
case class CEStats(bootstrap: String, threshold : String, TP : String, FP : 
String, FN : String, TN : String, precision : String, recall : String)

// Load the CSV, split on ",", map each line to CEStats, convert to DataFrame.
// Assumes at least 8 fields per line (TODO confirm the input format).
val cestats = 
sc.textFile("/user/cloudera/zepplin/mergeStatisticsTest1Combine.txt").map(_.split(",")).map(p
 => CEStats(p(0), p(1), p(2), p(3), p(4), p(5), p(6), p(7))).toDF()

// NOTE(review): "table" is a reserved word in SQL — referencing it unquoted
// from the %sql interpreter is likely to fail even when registration
// succeeds; prefer a non-reserved temp-table name.
cestats.registerTempTable("table")

// Lists the tables registered on THIS SQLContext. If this prints the table
// but %sql cannot see it, the %sql interpreter is presumably bound to a
// different SQLContext instance — verify the interpreter binding.
sqlContext.tableNames().foreach(println)

But when I run `%sql select * from table`, I get this exception:

org.apache.spark.sql.AnalysisException: no such table table; line 0 pos 0 at 
org.apache.spark.sql.catalyst.analysis.package$AnalysisErrorAt.failAnalysis(package.scala:42)
 at 
org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$.getTable(Analyzer.scala:260)

Reply via email to