Repository: bigtop
Updated Branches:
  refs/heads/master 5efdbfbfa -> 96a2a4aca


BIGTOP-2019. BigPetStore Spark isn't compiling due to changes in SQL API


Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/96a2a4ac
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/96a2a4ac
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/96a2a4ac

Branch: refs/heads/master
Commit: 96a2a4aca547d71eeab36094edff2159cc0bab4f
Parents: 5efdbfb
Author: RJ Nowling <[email protected]>
Authored: Tue Sep 8 13:48:15 2015 -0500
Committer: RJ Nowling <[email protected]>
Committed: Tue Sep 8 13:49:19 2015 -0500

----------------------------------------------------------------------
 .../spark/analytics/PetStoreStatistics.scala       | 17 +++++++++--------
 1 file changed, 9 insertions(+), 8 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/bigtop/blob/96a2a4ac/bigtop-bigpetstore/bigpetstore-spark/src/main/scala/org/apache/bigpetstore/spark/analytics/PetStoreStatistics.scala
----------------------------------------------------------------------
diff --git a/bigtop-bigpetstore/bigpetstore-spark/src/main/scala/org/apache/bigpetstore/spark/analytics/PetStoreStatistics.scala b/bigtop-bigpetstore/bigpetstore-spark/src/main/scala/org/apache/bigpetstore/spark/analytics/PetStoreStatistics.scala
index 2d376a4..e7e5b08 100644
--- a/bigtop-bigpetstore/bigpetstore-spark/src/main/scala/org/apache/bigpetstore/spark/analytics/PetStoreStatistics.scala
+++ b/bigtop-bigpetstore/bigpetstore-spark/src/main/scala/org/apache/bigpetstore/spark/analytics/PetStoreStatistics.scala
@@ -68,7 +68,7 @@ object PetStoreStatistics {
   def queryTxByMonth(sqlContext: SQLContext): Array[StatisticsTxByMonth] = {
     import sqlContext._
 
-    val results: SchemaRDD = sql("SELECT count(*), month FROM Transactions GROUP BY month")
+    val results: DataFrame = sql("SELECT count(*), month FROM Transactions GROUP BY month")
     val transactionsByMonth = results.collect()
     for(x<-transactionsByMonth){
       println(x)
@@ -82,7 +82,7 @@ object PetStoreStatistics {
   def queryTxByProductZip(sqlContext: SQLContext): Array[StatisticsTxByProductZip] = {
     import sqlContext._
 
-    val results: SchemaRDD = sql(
+    val results: DataFrame = sql(
       """SELECT count(*) c, productId, zipcode
 FROM Transactions t
 JOIN Stores s ON t.storeId = s.storeId
@@ -104,7 +104,7 @@ GROUP BY productId, zipcode""")
   def queryTxByProduct(sqlContext: SQLContext): Array[StatisticsTxByProduct] = {
     import sqlContext._
 
-    val results: SchemaRDD = sql(
+    val results: DataFrame = sql(
       """SELECT count(*) c, productId FROM Transactions GROUP BY productId""")
 
     val groupedProducts = results.collect()
@@ -121,16 +121,17 @@ GROUP BY productId, zipcode""")
 
     val sqlContext = new org.apache.spark.sql.SQLContext(sc)
     import sqlContext._
+    import sqlContext.implicits._
 
     // Transform the Non-SparkSQL Calendar into a SparkSQL-friendly field.
     val mappableTransactions:RDD[TransactionSQL] =
       r._5.map { trans => trans.toSQL() }
 
-    r._1.registerTempTable("Locations")
-    r._2.registerTempTable("Stores")
-    r._3.registerTempTable("Customers")
-    r._4.registerTempTable("Product")
-    mappableTransactions.registerTempTable("Transactions")
+    r._1.toDF().registerTempTable("Locations")
+    r._2.toDF().registerTempTable("Stores")
+    r._3.toDF().registerTempTable("Customers")
+    r._4.toDF().registerTempTable("Product")
+    mappableTransactions.toDF().registerTempTable("Transactions")
 
 
     val txByMonth = queryTxByMonth(sqlContext)

Reply via email to