[ 
https://issues.apache.org/jira/browse/CARBONDATA-3156?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel
 ]

xubo245 updated CARBONDATA-3156:
--------------------------------
    Issue Type: Sub-task  (was: Improvement)
        Parent: CARBONDATA-3155

> DataFrame doesn't support reading a carbon table by load path
> --------------------------------------------------------------
>
>                 Key: CARBONDATA-3156
>                 URL: https://issues.apache.org/jira/browse/CARBONDATA-3156
>             Project: CarbonData
>          Issue Type: Sub-task
>    Affects Versions: 1.5.1
>            Reporter: xubo245
>            Assignee: xubo245
>            Priority: Major
>
> When I use format("carbon"), the code is:
> {code:java}
>     val path2 = "/Users/xubo/Desktop/xubo/git/carbondata2/examples/spark2/target/store/default/source"
>     val df = sqlContext
>       .read
>       .format("carbon")
>       .load(path2)
>     df.show()
> {code}
> The result is empty:
> {code:java}
> 2018-12-10 09:56:12 INFO  ContextHandler:781 - Started o.s.j.s.ServletContextHandler@5b5ac798{/static/sql,null,AVAILABLE,@Spark}
> 2018-12-10 09:56:12 INFO  StateStoreCoordinatorRef:54 - Registered StateStoreCoordinator endpoint
> +----------+--------+-----------+-----------+-----------+--------------+------------+---------+---------+----------+
> |shortfield|intfield|bigintfield|doublefield|stringfield|timestampfield|decimalfield|datefield|charfield|floatfield|
> +----------+--------+-----------+-----------+-----------+--------------+------------+---------+---------+----------+
> +----------+--------+-----------+-----------+-----------+--------------+------------+---------+---------+----------+
> /Users/xubo/Desktop/xubo/git/carbondata2/examples/spark2/target/store/default/source
> {code}
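> For comparison, the expectation here is the same path-based access that Spark's built-in file sources provide without any table registration. A minimal sketch of that pattern against the built-in "parquet" source (the path below is illustrative only, not from this report):
> {code:java}
>     // Minimal sketch of the expected path-based read pattern, shown with the
>     // built-in "parquet" source for comparison; "/tmp/some_parquet_dir" is an
>     // illustrative path.
>     val parquetDf = sqlContext
>       .read
>       .format("parquet")
>       .load("/tmp/some_parquet_dir")
>     parquetDf.show()
> {code}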
> When I use format("carbondata"), the code is:
> {code:java}
>     val path2 = "/Users/xubo/Desktop/xubo/git/carbondata2/examples/spark2/target/store/default/source"
>     val df = sqlContext
>       .read
>       .format("carbondata")
>       .option("tableName", "carbon_table")
>       .load(path2)
>     df.show()
> {code}
> An exception is thrown:
> {code:java}
> Table or view 'carbon_table' not found in database 'default';
> org.apache.spark.sql.catalyst.analysis.NoSuchTableException: Table or view 'carbon_table' not found in database 'default';
>       at org.apache.spark.sql.hive.CarbonFileMetastore.createCarbonRelation(CarbonFileMetastore.scala:120)
>       at org.apache.spark.sql.CarbonDatasourceHadoopRelation.carbonRelation$lzycompute(CarbonDatasourceHadoopRelation.scala:65)
>       at org.apache.spark.sql.CarbonDatasourceHadoopRelation.carbonRelation(CarbonDatasourceHadoopRelation.scala:63)
>       at org.apache.spark.sql.CarbonDatasourceHadoopRelation$$anonfun$schema$1.apply(CarbonDatasourceHadoopRelation.scala:72)
>       at org.apache.spark.sql.CarbonDatasourceHadoopRelation$$anonfun$schema$1.apply(CarbonDatasourceHadoopRelation.scala:72)
>       at scala.Option.getOrElse(Option.scala:121)
>       at org.apache.spark.sql.CarbonDatasourceHadoopRelation.schema(CarbonDatasourceHadoopRelation.scala:72)
>       at org.apache.spark.sql.execution.datasources.DataSource.resolveRelation(DataSource.scala:432)
>       at org.apache.spark.sql.DataFrameReader.loadV1Source(DataFrameReader.scala:239)
>       at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:227)
>       at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:174)
>       at org.apache.carbondata.spark.testsuite.createTable.TestDataFrameReadNonTransactionalTableData$$anonfun$3.apply$mcV$sp(TestDataFrameReadNonTransactionalTableData.scala:300)
>       at org.apache.carbondata.spark.testsuite.createTable.TestDataFrameReadNonTransactionalTableData$$anonfun$3.apply(TestDataFrameReadNonTransactionalTableData.scala:291)
>       at org.apache.carbondata.spark.testsuite.createTable.TestDataFrameReadNonTransactionalTableData$$anonfun$3.apply(TestDataFrameReadNonTransactionalTableData.scala:291)
>       at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
>       at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
>       at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
>       at org.scalatest.Transformer.apply(Transformer.scala:22)
>       at org.scalatest.Transformer.apply(Transformer.scala:20)
>       at org.scalatest.FunSuiteLike$$anon$1.apply(FunSuiteLike.scala:166)
>       at org.apache.spark.sql.test.util.CarbonFunSuite.withFixture(CarbonFunSuite.scala:41)
>       at org.scalatest.FunSuiteLike$class.invokeWithFixture$1(FunSuiteLike.scala:163)
>       at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
>       at org.scalatest.FunSuiteLike$$anonfun$runTest$1.apply(FunSuiteLike.scala:175)
>       at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
>       at org.scalatest.FunSuiteLike$class.runTest(FunSuiteLike.scala:175)
>       at org.scalatest.FunSuite.runTest(FunSuite.scala:1555)
>       at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
>       at org.scalatest.FunSuiteLike$$anonfun$runTests$1.apply(FunSuiteLike.scala:208)
>       at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
>       at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
>       at scala.collection.immutable.List.foreach(List.scala:381)
>       at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
>       at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
>       at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
>       at org.scalatest.FunSuiteLike$class.runTests(FunSuiteLike.scala:208)
>       at org.scalatest.FunSuite.runTests(FunSuite.scala:1555)
>       at org.scalatest.Suite$class.run(Suite.scala:1424)
>       at org.scalatest.FunSuite.org$scalatest$FunSuiteLike$$super$run(FunSuite.scala:1555)
>       at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
>       at org.scalatest.FunSuiteLike$$anonfun$run$1.apply(FunSuiteLike.scala:212)
>       at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
>       at org.scalatest.FunSuiteLike$class.run(FunSuiteLike.scala:212)
>       at org.apache.carbondata.spark.testsuite.createTable.TestDataFrameReadNonTransactionalTableData.org$scalatest$BeforeAndAfterAll$$super$run(TestDataFrameReadNonTransactionalTableData.scala:57)
>       at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
>       at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
>       at org.apache.carbondata.spark.testsuite.createTable.TestDataFrameReadNonTransactionalTableData.run(TestDataFrameReadNonTransactionalTableData.scala:57)
>       at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
>       at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2563)
>       at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2557)
>       at scala.collection.immutable.List.foreach(List.scala:381)
>       at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:2557)
>       at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1044)
>       at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1043)
>       at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:2722)
>       at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1043)
>       at org.scalatest.tools.Runner$.run(Runner.scala:883)
>       at org.scalatest.tools.Runner.run(Runner.scala)
>       at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.runScalaTest2(ScalaTestRunner.java:131)
>       at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.main(ScalaTestRunner.java:28)
> {code}
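> For context rather than a fix: the "carbondata" format resolves the "tableName" option through the metastore (see CarbonFileMetastore.createCarbonRelation in the trace above), so this read path only succeeds for a table that is already registered. A minimal sketch, assuming a CarbonSession-backed sqlContext, the 1.x STORED BY 'carbondata' DDL, and an illustrative two-column schema:
> {code:java}
>     // Hedged sketch: register the table first so the metastore lookup in
>     // CarbonFileMetastore can succeed. Assumes a CarbonSession-backed
>     // sqlContext, CarbonData 1.x DDL, and an illustrative schema.
>     sqlContext.sql(
>       "CREATE TABLE IF NOT EXISTS carbon_table (stringField STRING, intField INT) " +
>         "STORED BY 'carbondata'")
>     val registered = sqlContext
>       .read
>       .format("carbondata")
>       .option("tableName", "carbon_table")
>       .load()
>     registered.show()
> {code}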



--
This message was sent by Atlassian JIRA
(v7.6.3#76005)
