[ https://issues.apache.org/jira/browse/IGNITE-12435?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]

Ivan Gagarkin updated IGNITE-12435:
-----------------------------------
    Epic Link: IGNITE-17460

> [Spark] Add support for saving to existing table via saveAsTable
> ----------------------------------------------------------------
>
>                 Key: IGNITE-12435
>                 URL: https://issues.apache.org/jira/browse/IGNITE-12435
>             Project: Ignite
>          Issue Type: Bug
>          Components: spark
>            Reporter: Alexey Zinoviev
>            Assignee: Alexey Zinoviev
>            Priority: Major
>             Fix For: 3.0
>
>
> Tests in IgniteSQLDataFrameIgniteSessionWriteSpec are muted due to a strange 
> error related to working with filesystems and URI schemes.
>  
> All three tests produce the same error when saveAsTable is called as the 
> terminal operation of a DataFrame write (a minimal sketch of the failing 
> call is included after the trace below):
> java.io.IOException: No FileSystem for scheme: ignite
>  at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:2586)
>  at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2593)
>  at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:91)
>  at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2632)
>  at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2614)
>  at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:370)
>  at org.apache.hadoop.fs.Path.getFileSystem(Path.java:296)
>  at org.apache.spark.sql.catalyst.catalog.SessionCatalog.validateTableLocation(SessionCatalog.scala:333)
>  at org.apache.spark.sql.execution.command.CreateDataSourceTableAsSelectCommand.run(createDataSourceTables.scala:170)
>  at org.apache.spark.sql.execution.command.DataWritingCommandExec.sideEffectResult$lzycompute(commands.scala:104)
>  at org.apache.spark.sql.execution.command.DataWritingCommandExec.sideEffectResult(commands.scala:102)
>  at org.apache.spark.sql.execution.command.DataWritingCommandExec.doExecute(commands.scala:122)
>  at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:131)
>  at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:127)
>  at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:155)
>  at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
>  at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:152)
>  at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:127)
>  at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:80)
>  at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:80)
>  at org.apache.spark.sql.DataFrameWriter$$anonfun$runCommand$1.apply(DataFrameWriter.scala:676)
>  at org.apache.spark.sql.DataFrameWriter$$anonfun$runCommand$1.apply(DataFrameWriter.scala:676)
>  at org.apache.spark.sql.execution.SQLExecution$$anonfun$withNewExecutionId$1.apply(SQLExecution.scala:78)
>  at org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:125)
>  at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:73)
>  at org.apache.spark.sql.DataFrameWriter.runCommand(DataFrameWriter.scala:676)
>  at org.apache.spark.sql.DataFrameWriter.createTable(DataFrameWriter.scala:474)
>  at org.apache.spark.sql.DataFrameWriter.saveAsTable(DataFrameWriter.scala:449)
>  at org.apache.spark.sql.DataFrameWriter.saveAsTable(DataFrameWriter.scala:409)
>  at org.apache.ignite.spark.IgniteSQLDataFrameIgniteSessionWriteSpec$$anonfun$1$$anonfun$apply$mcV$sp$1.apply$mcV$sp(IgniteSQLDataFrameIgniteSessionWriteSpec.scala:45)
>  at org.apache.ignite.spark.IgniteSQLDataFrameIgniteSessionWriteSpec$$anonfun$1$$anonfun$apply$mcV$sp$1.apply(IgniteSQLDataFrameIgniteSessionWriteSpec.scala:35)
>  at org.apache.ignite.spark.IgniteSQLDataFrameIgniteSessionWriteSpec$$anonfun$1$$anonfun$apply$mcV$sp$1.apply(IgniteSQLDataFrameIgniteSessionWriteSpec.scala:35)
>  at org.scalatest.Transformer$$anonfun$apply$1.apply$mcV$sp(Transformer.scala:22)
>  at org.scalatest.OutcomeOf$class.outcomeOf(OutcomeOf.scala:85)
>  at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
>  at org.scalatest.Transformer.apply(Transformer.scala:22)
>  at org.scalatest.Transformer.apply(Transformer.scala:20)
>  at org.scalatest.FunSpecLike$$anon$1.apply(FunSpecLike.scala:422)
>  at org.scalatest.Suite$class.withFixture(Suite.scala:1122)
>  at org.scalatest.FunSpec.withFixture(FunSpec.scala:1626)
>  at org.scalatest.FunSpecLike$class.invokeWithFixture$1(FunSpecLike.scala:419)
>  at org.scalatest.FunSpecLike$$anonfun$runTest$1.apply(FunSpecLike.scala:431)
>  at org.scalatest.FunSpecLike$$anonfun$runTest$1.apply(FunSpecLike.scala:431)
>  at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
>  at org.scalatest.FunSpecLike$class.runTest(FunSpecLike.scala:431)
>  at org.apache.ignite.spark.AbstractDataFrameSpec.org$scalatest$BeforeAndAfter$$super$runTest(AbstractDataFrameSpec.scala:39)
>  at org.scalatest.BeforeAndAfter$class.runTest(BeforeAndAfter.scala:200)
>  at org.apache.ignite.spark.AbstractDataFrameSpec.runTest(AbstractDataFrameSpec.scala:39)
>  at org.scalatest.FunSpecLike$$anonfun$runTests$1.apply(FunSpecLike.scala:464)
>  at org.scalatest.FunSpecLike$$anonfun$runTests$1.apply(FunSpecLike.scala:464)
>  at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:413)
>  at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
>  at scala.collection.immutable.List.foreach(List.scala:392)
>  at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
>  at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:390)
>  at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:427)
>  at org.scalatest.SuperEngine$$anonfun$traverseSubNodes$1$1.apply(Engine.scala:401)
>  at scala.collection.immutable.List.foreach(List.scala:392)
>  at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
>  at org.scalatest.SuperEngine.org$scalatest$SuperEngine$$runTestsInBranch(Engine.scala:396)
>  at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:483)
>  at org.scalatest.FunSpecLike$class.runTests(FunSpecLike.scala:464)
>  at org.scalatest.FunSpec.runTests(FunSpec.scala:1626)
>  at org.scalatest.Suite$class.run(Suite.scala:1424)
>  at org.scalatest.FunSpec.org$scalatest$FunSpecLike$$super$run(FunSpec.scala:1626)
>  at org.scalatest.FunSpecLike$$anonfun$run$1.apply(FunSpecLike.scala:468)
>  at org.scalatest.FunSpecLike$$anonfun$run$1.apply(FunSpecLike.scala:468)
>  at org.scalatest.SuperEngine.runImpl(Engine.scala:545)
>  at org.scalatest.FunSpecLike$class.run(FunSpecLike.scala:468)
>  at org.apache.ignite.spark.AbstractDataFrameSpec.org$scalatest$BeforeAndAfterAll$$super$run(AbstractDataFrameSpec.scala:39)
>  at org.scalatest.BeforeAndAfterAll$class.liftedTree1$1(BeforeAndAfterAll.scala:257)
>  at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:256)
>  at org.apache.ignite.spark.AbstractDataFrameSpec.org$scalatest$BeforeAndAfter$$super$run(AbstractDataFrameSpec.scala:39)
>  at org.scalatest.BeforeAndAfter$class.run(BeforeAndAfter.scala:241)
>  at org.apache.ignite.spark.AbstractDataFrameSpec.run(AbstractDataFrameSpec.scala:39)
>  at org.scalatest.junit.JUnitRunner.run(JUnitRunner.scala:99)
>  at org.junit.runner.JUnitCore.run(JUnitCore.java:160)
>  at com.intellij.junit4.JUnit4IdeaTestRunner.startRunnerWithArgs(JUnit4IdeaTestRunner.java:68)
>  at com.intellij.rt.junit.IdeaTestRunner$Repeater.startRunnerWithArgs(IdeaTestRunner.java:33)
>  at com.intellij.rt.junit.JUnitStarter.prepareStreamsAndStart(JUnitStarter.java:230)
>  at com.intellij.rt.junit.JUnitStarter.main(JUnitStarter.java:58)
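>
> For context, a minimal sketch of the kind of call the muted tests make. The 
> table name, config path, and sample data here are illustrative assumptions, 
> not copied from the spec; the spec itself builds an Ignite-backed session 
> through the ignite-spark module, but the failing call shape is the same:
>
>     import org.apache.spark.sql.{SaveMode, SparkSession}
>
>     val spark = SparkSession.builder()
>       .appName("ignite-saveAsTable-repro")
>       .master("local[*]")
>       .getOrCreate()
>
>     val persons = spark.createDataFrame(Seq((1, "John"), (2, "Jane"))).toDF("id", "name")
>
>     persons.write
>       .format("ignite")                             // Ignite DataFrame data source
>       .option("config", "ignite-spark-config.xml")  // hypothetical Ignite config path
>       .mode(SaveMode.Append)
>       .saveAsTable("person")                        // fails: No FileSystem for scheme: ignite
>
> Per the trace above, the failure happens in SessionCatalog.validateTableLocation, 
> where Spark resolves the table location as a Hadoop Path and finds no FileSystem 
> registered for the "ignite" scheme.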



--
This message was sent by Atlassian Jira
(v8.20.10#820010)
