AngersZhuuuu commented on a change in pull request #35258:
URL: https://github.com/apache/spark/pull/35258#discussion_r791371830
##########
File path:
sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
##########
@@ -2926,16 +2926,18 @@ class HiveDDLSuite
}
}
- test("SPARK-33844: Insert overwrite directory should check schema too") {
+ test("SPARK-33844, 37969: Insert overwrite directory should check schema
too") {
withView("v") {
spark.range(1).createTempView("v")
withTempPath { path =>
- val e = intercept[SparkException] {
- spark.sql(s"INSERT OVERWRITE LOCAL DIRECTORY
'${path.getCanonicalPath}' " +
- s"STORED AS PARQUET SELECT ID, if(1=1, 1, 0), abs(id), '^-' FROM
v")
- }.getCause.getCause.getMessage
- assert(e.contains(
- "field ended by ';': expected ';' but got 'IF' at line 2: optional
int32 (IF"))
+ Seq("PARQUET", "ORC").foreach { format =>
+ val e = intercept[SparkException] {
Review comment:
> should this be `AnalysisException` now?
It is wrapped by:
```
Cause: org.apache.spark.SparkException: Failed inserting overwrite directory
/Users/yi.zhu/Documents/project/Angerszhuuuu/spark/sql/hive/target/tmp/hive_execution_test_group/spark-ef505a83-fb40-40f5-8a9f-b370c8425a0d
[info] at
org.apache.spark.sql.hive.execution.InsertIntoHiveDirCommand.run(InsertIntoHiveDirCommand.scala:133)
```
The whole error stack is:
```
[info] Cause: org.apache.spark.SparkException: Failed inserting overwrite
directory
/Users/yi.zhu/Documents/project/Angerszhuuuu/spark/sql/hive/target/tmp/hive_execution_test_group/spark-ef505a83-fb40-40f5-8a9f-b370c8425a0d
[info] at
org.apache.spark.sql.hive.execution.InsertIntoHiveDirCommand.run(InsertIntoHiveDirCommand.scala:133)
[info] at
org.apache.spark.sql.execution.command.DataWritingCommandExec.sideEffectResult$lzycompute(commands.scala:113)
[info] at
org.apache.spark.sql.execution.command.DataWritingCommandExec.sideEffectResult(commands.scala:111)
[info] at
org.apache.spark.sql.execution.command.DataWritingCommandExec.executeCollect(commands.scala:125)
[info] at
org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.$anonfun$applyOrElse$1(QueryExecution.scala:110)
[info] at
org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$6(SQLExecution.scala:109)
[info] at
org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:169)
[info] at
org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:95)
[info] at
org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)
[info] at
org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
[info] at
org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:110)
[info] at
org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:106)
[info] at
org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$1(TreeNode.scala:481)
[info] at
org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:83)
[info] at
org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:481)
[info] at
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:30)
[info] at
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:267)
[info] at
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:263)
[info] at
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)
[info] at
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)
[info] at
org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:457)
[info] at
org.apache.spark.sql.execution.QueryExecution.eagerlyExecuteCommands(QueryExecution.scala:106)
[info] at
org.apache.spark.sql.execution.QueryExecution.commandExecuted$lzycompute(QueryExecution.scala:93)
[info] at
org.apache.spark.sql.execution.QueryExecution.commandExecuted(QueryExecution.scala:91)
[info] at org.apache.spark.sql.Dataset.<init>(Dataset.scala:220)
[info] at org.apache.spark.sql.Dataset$.$anonfun$ofRows$1(Dataset.scala:92)
[info] at
org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)
[info] at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:89)
[info] at
org.apache.spark.sql.hive.test.TestHiveSparkSession.$anonfun$sql$1(TestHive.scala:240)
[info] at
org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)
[info] at
org.apache.spark.sql.hive.test.TestHiveSparkSession.sql(TestHive.scala:238)
[info] at
org.apache.spark.sql.hive.execution.HiveDDLSuite.$anonfun$new$389(HiveDDLSuite.scala:2935)
[info] at org.scalatest.Assertions.intercept(Assertions.scala:749)
[info] at org.scalatest.Assertions.intercept$(Assertions.scala:746)
[info] at
org.scalatest.funsuite.AnyFunSuite.intercept(AnyFunSuite.scala:1563)
[info] at
org.apache.spark.sql.hive.execution.HiveDDLSuite.$anonfun$new$388(HiveDDLSuite.scala:2933)
[info] at
org.apache.spark.sql.hive.execution.HiveDDLSuite.$anonfun$new$388$adapted(HiveDDLSuite.scala:2932)
[info] at
org.apache.spark.sql.catalyst.plans.SQLHelper.withTempPath(SQLHelper.scala:69)
[info] at
org.apache.spark.sql.catalyst.plans.SQLHelper.withTempPath$(SQLHelper.scala:66)
[info] at org.apache.spark.sql.QueryTest.withTempPath(QueryTest.scala:34)
[info] at
org.apache.spark.sql.hive.execution.HiveDDLSuite.$anonfun$new$387(HiveDDLSuite.scala:2932)
[info] at
scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
[info] at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1475)
[info] at
org.apache.spark.sql.test.SQLTestUtilsBase.withView(SQLTestUtils.scala:317)
[info] at
org.apache.spark.sql.test.SQLTestUtilsBase.withView$(SQLTestUtils.scala:315)
[info] at
org.apache.spark.sql.hive.execution.HiveDDLSuite.withView(HiveDDLSuite.scala:395)
[info] at
org.apache.spark.sql.hive.execution.HiveDDLSuite.$anonfun$new$386(HiveDDLSuite.scala:2930)
[info] at
scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
[info] at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
[info] at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
[info] at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
[info] at org.scalatest.Transformer.apply(Transformer.scala:22)
[info] at org.scalatest.Transformer.apply(Transformer.scala:20)
[info] at
org.scalatest.funsuite.AnyFunSuiteLike$$anon$1.apply(AnyFunSuiteLike.scala:190)
[info] at
org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:203)
[info] at
org.scalatest.funsuite.AnyFunSuiteLike.invokeWithFixture$1(AnyFunSuiteLike.scala:188)
[info] at
org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTest$1(AnyFunSuiteLike.scala:200)
[info] at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
[info] at
org.scalatest.funsuite.AnyFunSuiteLike.runTest(AnyFunSuiteLike.scala:200)
[info] at
org.scalatest.funsuite.AnyFunSuiteLike.runTest$(AnyFunSuiteLike.scala:182)
[info] at
org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:64)
[info] at
org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:234)
[info] at
org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:227)
[info] at org.apache.spark.SparkFunSuite.runTest(SparkFunSuite.scala:64)
[info] at
org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTests$1(AnyFunSuiteLike.scala:233)
[info] at
org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:413)
[info] at scala.collection.immutable.List.foreach(List.scala:431)
[info] at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
[info] at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:396)
[info] at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:475)
[info] at
org.scalatest.funsuite.AnyFunSuiteLike.runTests(AnyFunSuiteLike.scala:233)
[info] at
org.scalatest.funsuite.AnyFunSuiteLike.runTests$(AnyFunSuiteLike.scala:232)
[info] at
org.scalatest.funsuite.AnyFunSuite.runTests(AnyFunSuite.scala:1563)
[info] at org.scalatest.Suite.run(Suite.scala:1112)
[info] at org.scalatest.Suite.run$(Suite.scala:1094)
[info] at
org.scalatest.funsuite.AnyFunSuite.org$scalatest$funsuite$AnyFunSuiteLike$$super$run(AnyFunSuite.scala:1563)
[info] at
org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$run$1(AnyFunSuiteLike.scala:237)
[info] at org.scalatest.SuperEngine.runImpl(Engine.scala:535)
[info] at
org.scalatest.funsuite.AnyFunSuiteLike.run(AnyFunSuiteLike.scala:237)
[info] at
org.scalatest.funsuite.AnyFunSuiteLike.run$(AnyFunSuiteLike.scala:236)
[info] at
org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:64)
[info] at
org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
[info] at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
[info] at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
[info] at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:64)
[info] at
org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:318)
[info] at
org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:513)
[info] at sbt.ForkMain$Run.lambda$runTest$1(ForkMain.java:413)
[info] at java.util.concurrent.FutureTask.run(FutureTask.java:266)
[info] at
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
[info] at
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
[info] at java.lang.Thread.run(Thread.java:748)
[info] Cause: org.apache.spark.sql.AnalysisException: Column name "(IF((1
= 1), 1, 0))" contains invalid character(s). Please use alias to rename it.
[info] at
org.apache.spark.sql.errors.QueryCompilationErrors$.columnNameContainsInvalidCharactersError(QueryCompilationErrors.scala:2079)
[info] at
org.apache.spark.sql.execution.datasources.DataSourceUtils$.$anonfun$checkFieldNames$1(DataSourceUtils.scala:70)
[info] at
org.apache.spark.sql.execution.datasources.DataSourceUtils$.$anonfun$checkFieldNames$1$adapted(DataSourceUtils.scala:68)
[info] at scala.collection.Iterator.foreach(Iterator.scala:943)
[info] at scala.collection.Iterator.foreach$(Iterator.scala:943)
[info] at scala.collection.AbstractIterator.foreach(Iterator.scala:1431)
[info] at scala.collection.IterableLike.foreach(IterableLike.scala:74)
[info] at scala.collection.IterableLike.foreach$(IterableLike.scala:73)
[info] at
org.apache.spark.sql.types.StructType.foreach(StructType.scala:102)
[info] at
org.apache.spark.sql.execution.datasources.DataSourceUtils$.checkFieldNames(DataSourceUtils.scala:68)
[info] at
org.apache.spark.sql.execution.datasources.DataSourceUtils$.verifySchema(DataSourceUtils.scala:89)
[info] at
org.apache.spark.sql.execution.datasources.FileFormatWriter$.write(FileFormatWriter.scala:163)
[info] at
org.apache.spark.sql.hive.execution.SaveAsHiveFile.saveAsHiveFile(SaveAsHiveFile.scala:105)
[info] at
org.apache.spark.sql.hive.execution.SaveAsHiveFile.saveAsHiveFile$(SaveAsHiveFile.scala:50)
[info] at
org.apache.spark.sql.hive.execution.InsertIntoHiveDirCommand.saveAsHiveFile(InsertIntoHiveDirCommand.scala:53)
[info] at
org.apache.spark.sql.hive.execution.InsertIntoHiveDirCommand.run(InsertIntoHiveDirCommand.scala:108)
[info] at
org.apache.spark.sql.execution.command.DataWritingCommandExec.sideEffectResult$lzycompute(commands.scala:113)
[info] at
org.apache.spark.sql.execution.command.DataWritingCommandExec.sideEffectResult(commands.scala:111)
[info] at
org.apache.spark.sql.execution.command.DataWritingCommandExec.executeCollect(commands.scala:125)
[info] at
org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.$anonfun$applyOrElse$1(QueryExecution.scala:110)
[info] at
org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$6(SQLExecution.scala:109)
[info] at
org.apache.spark.sql.execution.SQLExecution$.withSQLConfPropagated(SQLExecution.scala:169)
[info] at
org.apache.spark.sql.execution.SQLExecution$.$anonfun$withNewExecutionId$1(SQLExecution.scala:95)
[info] at
org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)
[info] at
org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:64)
[info] at
org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:110)
[info] at
org.apache.spark.sql.execution.QueryExecution$$anonfun$eagerlyExecuteCommands$1.applyOrElse(QueryExecution.scala:106)
[info] at
org.apache.spark.sql.catalyst.trees.TreeNode.$anonfun$transformDownWithPruning$1(TreeNode.scala:481)
[info] at
org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:83)
[info] at
org.apache.spark.sql.catalyst.trees.TreeNode.transformDownWithPruning(TreeNode.scala:481)
[info] at
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.org$apache$spark$sql$catalyst$plans$logical$AnalysisHelper$$super$transformDownWithPruning(LogicalPlan.scala:30)
[info] at
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning(AnalysisHelper.scala:267)
[info] at
org.apache.spark.sql.catalyst.plans.logical.AnalysisHelper.transformDownWithPruning$(AnalysisHelper.scala:263)
[info] at
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)
[info] at
org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.transformDownWithPruning(LogicalPlan.scala:30)
[info] at
org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:457)
[info] at
org.apache.spark.sql.execution.QueryExecution.eagerlyExecuteCommands(QueryExecution.scala:106)
[info] at
org.apache.spark.sql.execution.QueryExecution.commandExecuted$lzycompute(QueryExecution.scala:93)
[info] at
org.apache.spark.sql.execution.QueryExecution.commandExecuted(QueryExecution.scala:91)
[info] at org.apache.spark.sql.Dataset.<init>(Dataset.scala:220)
[info] at org.apache.spark.sql.Dataset$.$anonfun$ofRows$1(Dataset.scala:92)
[info] at
org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)
[info] at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:89)
[info] at
org.apache.spark.sql.hive.test.TestHiveSparkSession.$anonfun$sql$1(TestHive.scala:240)
[info] at
org.apache.spark.sql.SparkSession.withActive(SparkSession.scala:779)
[info] at
org.apache.spark.sql.hive.test.TestHiveSparkSession.sql(TestHive.scala:238)
[info] at
org.apache.spark.sql.hive.execution.HiveDDLSuite.$anonfun$new$389(HiveDDLSuite.scala:2935)
[info] at org.scalatest.Assertions.intercept(Assertions.scala:749)
[info] at org.scalatest.Assertions.intercept$(Assertions.scala:746)
[info] at
org.scalatest.funsuite.AnyFunSuite.intercept(AnyFunSuite.scala:1563)
[info] at
org.apache.spark.sql.hive.execution.HiveDDLSuite.$anonfun$new$388(HiveDDLSuite.scala:2933)
[info] at
org.apache.spark.sql.hive.execution.HiveDDLSuite.$anonfun$new$388$adapted(HiveDDLSuite.scala:2932)
[info] at
org.apache.spark.sql.catalyst.plans.SQLHelper.withTempPath(SQLHelper.scala:69)
[info] at
org.apache.spark.sql.catalyst.plans.SQLHelper.withTempPath$(SQLHelper.scala:66)
[info] at org.apache.spark.sql.QueryTest.withTempPath(QueryTest.scala:34)
[info] at
org.apache.spark.sql.hive.execution.HiveDDLSuite.$anonfun$new$387(HiveDDLSuite.scala:2932)
[info] at
scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
[info] at org.apache.spark.util.Utils$.tryWithSafeFinally(Utils.scala:1475)
[info] at
org.apache.spark.sql.test.SQLTestUtilsBase.withView(SQLTestUtils.scala:317)
[info] at
org.apache.spark.sql.test.SQLTestUtilsBase.withView$(SQLTestUtils.scala:315)
[info] at
org.apache.spark.sql.hive.execution.HiveDDLSuite.withView(HiveDDLSuite.scala:395)
[info] at
org.apache.spark.sql.hive.execution.HiveDDLSuite.$anonfun$new$386(HiveDDLSuite.scala:2930)
[info] at
scala.runtime.java8.JFunction0$mcV$sp.apply(JFunction0$mcV$sp.java:23)
[info] at org.scalatest.OutcomeOf.outcomeOf(OutcomeOf.scala:85)
[info] at org.scalatest.OutcomeOf.outcomeOf$(OutcomeOf.scala:83)
[info] at org.scalatest.OutcomeOf$.outcomeOf(OutcomeOf.scala:104)
[info] at org.scalatest.Transformer.apply(Transformer.scala:22)
[info] at org.scalatest.Transformer.apply(Transformer.scala:20)
[info] at
org.scalatest.funsuite.AnyFunSuiteLike$$anon$1.apply(AnyFunSuiteLike.scala:190)
[info] at
org.apache.spark.SparkFunSuite.withFixture(SparkFunSuite.scala:203)
[info] at
org.scalatest.funsuite.AnyFunSuiteLike.invokeWithFixture$1(AnyFunSuiteLike.scala:188)
[info] at
org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTest$1(AnyFunSuiteLike.scala:200)
[info] at org.scalatest.SuperEngine.runTestImpl(Engine.scala:306)
[info] at
org.scalatest.funsuite.AnyFunSuiteLike.runTest(AnyFunSuiteLike.scala:200)
[info] at
org.scalatest.funsuite.AnyFunSuiteLike.runTest$(AnyFunSuiteLike.scala:182)
[info] at
org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterEach$$super$runTest(SparkFunSuite.scala:64)
[info] at
org.scalatest.BeforeAndAfterEach.runTest(BeforeAndAfterEach.scala:234)
[info] at
org.scalatest.BeforeAndAfterEach.runTest$(BeforeAndAfterEach.scala:227)
[info] at org.apache.spark.SparkFunSuite.runTest(SparkFunSuite.scala:64)
[info] at
org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$runTests$1(AnyFunSuiteLike.scala:233)
[info] at
org.scalatest.SuperEngine.$anonfun$runTestsInBranch$1(Engine.scala:413)
[info] at scala.collection.immutable.List.foreach(List.scala:431)
[info] at org.scalatest.SuperEngine.traverseSubNodes$1(Engine.scala:401)
[info] at org.scalatest.SuperEngine.runTestsInBranch(Engine.scala:396)
[info] at org.scalatest.SuperEngine.runTestsImpl(Engine.scala:475)
[info] at
org.scalatest.funsuite.AnyFunSuiteLike.runTests(AnyFunSuiteLike.scala:233)
[info] at
org.scalatest.funsuite.AnyFunSuiteLike.runTests$(AnyFunSuiteLike.scala:232)
[info] at
org.scalatest.funsuite.AnyFunSuite.runTests(AnyFunSuite.scala:1563)
[info] at org.scalatest.Suite.run(Suite.scala:1112)
[info] at org.scalatest.Suite.run$(Suite.scala:1094)
[info] at
org.scalatest.funsuite.AnyFunSuite.org$scalatest$funsuite$AnyFunSuiteLike$$super$run(AnyFunSuite.scala:1563)
[info] at
org.scalatest.funsuite.AnyFunSuiteLike.$anonfun$run$1(AnyFunSuiteLike.scala:237)
[info] at org.scalatest.SuperEngine.runImpl(Engine.scala:535)
[info] at
org.scalatest.funsuite.AnyFunSuiteLike.run(AnyFunSuiteLike.scala:237)
[info] at
org.scalatest.funsuite.AnyFunSuiteLike.run$(AnyFunSuiteLike.scala:236)
[info] at
org.apache.spark.SparkFunSuite.org$scalatest$BeforeAndAfterAll$$super$run(SparkFunSuite.scala:64)
[info] at
org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:213)
[info] at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
[info] at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
[info] at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:64)
[info] at
org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:318)
[info] at
org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:513)
[info] at sbt.ForkMain$Run.lambda$runTest$1(ForkMain.java:413)
[info] at java.util.concurrent.FutureTask.run(FutureTask.java:266)
[info] at
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
[info] at
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
[info] at java.lang.Thread.run(Thread.java:748)
```
--
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
To unsubscribe, e-mail: [email protected]
For queries about this service, please contact Infrastructure at:
[email protected]
---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]