[ 
https://issues.apache.org/jira/browse/SPARK-17622?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
 ]

renzhi he updated SPARK-17622:
------------------------------
    Description: 
sc <- sparkR.session(master="local[*]", appName="sparkR", sparkConfig = 
list(spark.driver.memory = "2g"))

df <- as.DataFrame(faithful)

The following error is thrown:

Error in invokeJava(isStatic = TRUE, className, methodName, ...) :            
java.lang.reflect.InvocationTargetException
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at 
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
at 
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
at 
org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:258)
at 
org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:359)
at 
org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:263)
at 
org.apache.spark.sql.hive.HiveSharedState.metadataHive$lzycompute(HiveSharedState.scala:39)
at 
org.apache.spark.sql.hive.HiveSharedState.metadataHive(HiveSharedState.scala:38)
at 
org.apache.spark.sql.hive.HiveSharedState.externalCatalog$lzycompute(HiveSharedState.scala:46)
at org.apache.spark.sql.hive.HiveSharedSt


On Spark 1.6.1 and Spark 1.6.2, the corresponding code below runs successfully.
sc1 <- sparkR.init(master = "local[*]", sparkEnvir = 
list(spark.driver.memory="2g"))
sqlContext <- sparkRSQL.init(sc1)
df <- as.DataFrame(sqlContext,faithful)

  was:
sc <- sparkR.session(master="spark://spark01.cmua.dom:7077", appName="sparkR", 
sparkConfig = list(spark.driver.memory = "2g"))

df <- as.DataFrame(faithful)


The following error is thrown:
Error in invokeJava(isStatic = TRUE, className, methodName, ...) :            
java.lang.reflect.InvocationTargetException
        at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
        at 
sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
       at 
sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
      at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
      at 
org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:258)
        at 
org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:359)
        at 
org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:263)
       at 
org.apache.spark.sql.hive.HiveSharedState.metadataHive$lzycompute(HiveSharedState.scala:39)
        at 
org.apache.spark.sql.hive.HiveSharedState.metadataHive(HiveSharedState.scala:38)
        at 
org.apache.spark.sql.hive.HiveSharedState.externalCatalog$lzycompute(HiveSharedState.scala:46)
        at org.apache.spark.sql.hive.HiveSharedSt


> Cannot run SparkR function on Windows- Spark 2.0.0
> --------------------------------------------------
>
>                 Key: SPARK-17622
>                 URL: https://issues.apache.org/jira/browse/SPARK-17622
>             Project: Spark
>          Issue Type: Bug
>          Components: Java API
>    Affects Versions: 2.0.0
>         Environment: windows 10
> R 3.3.1
> RStudio 1.0.20
>            Reporter: renzhi he
>              Labels: windows
>             Fix For: 1.6.1, 1.6.2
>
>
> sc <- sparkR.session(master="local[*]", appName="sparkR", sparkConfig = 
> list(spark.driver.memory = "2g"))
> df <- as.DataFrame(faithful)
> The following error is thrown:
> Error in invokeJava(isStatic = TRUE, className, methodName, ...) :            
> java.lang.reflect.InvocationTargetException
> at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
> at 
> sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
> at 
> sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
> at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
> at 
> org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:258)
> at 
> org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:359)
> at 
> org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:263)
> at 
> org.apache.spark.sql.hive.HiveSharedState.metadataHive$lzycompute(HiveSharedState.scala:39)
> at 
> org.apache.spark.sql.hive.HiveSharedState.metadataHive(HiveSharedState.scala:38)
> at 
> org.apache.spark.sql.hive.HiveSharedState.externalCatalog$lzycompute(HiveSharedState.scala:46)
> at org.apache.spark.sql.hive.HiveSharedSt
> On Spark 1.6.1 and Spark 1.6.2, the corresponding code below runs successfully.
> sc1 <- sparkR.init(master = "local[*]", sparkEnvir = 
> list(spark.driver.memory="2g"))
> sqlContext <- sparkRSQL.init(sc1)
> df <- as.DataFrame(sqlContext,faithful)



--
This message was sent by Atlassian JIRA
(v6.3.4#6332)

---------------------------------------------------------------------
To unsubscribe, e-mail: issues-unsubscr...@spark.apache.org
For additional commands, e-mail: issues-h...@spark.apache.org

Reply via email to