[jira] [Updated] (HIVE-18175) no viable alternative at input 'INSERT '

2017-11-28 Thread Ravi (JIRA)

 [ https://issues.apache.org/jira/browse/HIVE-18175?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Ravi updated HIVE-18175:

Description: 
abc.txt   (Source File)

2,bbb
3,ccc


package come.hive.programs

import org.apache.log4j._
import org.apache.spark.sql._
import org.apache.spark.sql.SparkSession

object HiveLoad {

  case class Person(empno: String, ename: String)

  def main(args: Array[String]): Unit = {

    // Keep the console output readable
    Logger.getLogger("org").setLevel(Level.ERROR)

    // Hive-enabled session with dynamic partitioning allowed
    val spark = SparkSession.builder()
      .appName("STORE_RSS_Entries")
      .master("local[*]")
      .config("hive.exec.dynamic.partition", "true")
      .config("hive.exec.dynamic.partition.mode", "nonstrict")
      .enableHiveSupport()
      .getOrCreate()

    println("Connection Started..")

    // Load the source file and map each CSV line to a Person
    val file = spark.sparkContext.textFile("/home/ravi/dataHub/source/abc.txt")

    import spark.implicits._

    val x = file.map(line => line.split(",")).map(p => Person(p(0), p(1))).toDS()

    x.createOrReplaceTempView("rssdata")
    x.printSchema()
    spark.sql("select empno,ename from rssdata").show()

    // This is the statement that fails to parse (see ERROR below)
    val abcHive = "INSERT OVERWRITE  TABLE sample1.emp select empno,ename from rssdata"
    spark.sql(abcHive)

    println("Connection Completed..")
  }
}
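
For what it's worth, the same write can also go through the DataFrameWriter API instead of a SQL string, which avoids SparkSqlParser altogether. A minimal sketch of that possible workaround, assuming the Dataset x from the program above and an existing Hive table sample1.emp with matching (empno, ename) columns:

  // Possible workaround (assumption, not verified here): write the Dataset
  // directly rather than parsing an INSERT statement. Assumes sample1.emp
  // already exists with columns (empno, ename) in matching order.
  x.select("empno", "ename")
    .write
    .mode(SaveMode.Overwrite)   // same intent as INSERT OVERWRITE
    .insertInto("sample1.emp")

SaveMode is already in scope via import org.apache.spark.sql._.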


ERROR
=====

17/11/28 19:40:40 INFO SparkSqlParser: Parsing command: INSERT OVERWRITE  TABLE sample1.emp select empno,ename from rssdata
Exception in thread "main" org.apache.spark.sql.catalyst.parser.ParseException:
no viable alternative at input 'INSERT '(line 1, pos 6)

== SQL ==
INSERT OVERWRITE  TABLE sample1.emp select empno,ename from rssdata
------^^^

at org.apache.spark.sql.catalyst.parser.ParseException.withCommand(ParseDriver.scala:197)
at org.apache.spark.sql.catalyst.parser.AbstractSqlParser.parse(ParseDriver.scala:99)
at org.apache.spark.sql.execution.SparkSqlParser.parse(SparkSqlParser.scala:46)
at org.apache.spark.sql.catalyst.parser.AbstractSqlParser.parsePlan(ParseDriver.scala:53)
at org.apache.spark.sql.SparkSession.sql(SparkSession.scala:582)
at come.hive.programs.HiveLoad$.main(HiveLoad.scala:60)
at come.hive.programs.HiveLoad.main(HiveLoad.scala)
17/11/28 19:40:40 INFO SparkContext: Invoking stop() from shutdown hook
17/11/28 19:40:40 INFO SparkUI: Stopped Spark web UI at http://192.168.183.133:4040
17/11/28 19:40:40 INFO MapOutputTrackerMasterEndpoint: MapOutputTrackerMasterEndpoint stopped!
17/11/28 19:40:40 INFO MemoryStore: MemoryStore cleared
17/11/28 19:40:40 INFO BlockManager: BlockManager stopped
17/11/28 19:40:40 INFO BlockManagerMaster: BlockManagerMaster stopped
17/11/28 19:40:40 INFO OutputCommitCoordinator$OutputCommitCoordinatorEndpoint: OutputCommitCoordinator stopped!
17/11/28 19:40:40 INFO SparkContext: Successfully stopped SparkContext
17/11/28 19:40:40 INFO ShutdownHookManager: Shutdown hook called
17/11/28 19:40:40 INFO ShutdownHookManager: Deleting directory /tmp/spark-71d7ec75-14b9-4216-9563-54f296e7b012
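
One thing worth checking: pos 6 is the first character after INSERT, so the parser does not seem to recognize what follows as the OVERWRITE keyword. A hypothetical quick check for a non-printing character between the two words (e.g. a non-breaking space picked up by copy-paste), run against the same string:

  // Hypothetical diagnostic: print the code point of each leading character
  // of the statement; a normal space is U+0020, a non-breaking space U+00A0.
  abcHive.take(20).foreach(c => println(f"'$c' -> U+${c.toInt}%04X"))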
