[ 
https://issues.apache.org/jira/browse/SPARK-29027?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16927783#comment-16927783
 ] 

koert kuipers commented on SPARK-29027:
---------------------------------------

I get the same error in sbt, I think; plus I find sbt a lot easier to handle :)
{code}
[info] KafkaDelegationTokenSuite:
[info] org.apache.spark.sql.kafka010.KafkaDelegationTokenSuite *** ABORTED *** 
(10 seconds, 543 milliseconds)
[info]   org.I0Itec.zkclient.exception.ZkAuthFailedException: Authentication 
failure
[info]   at org.I0Itec.zkclient.ZkClient.waitForKeeperState(ZkClient.java:947)
[info]   at org.I0Itec.zkclient.ZkClient.waitUntilConnected(ZkClient.java:924)
[info]   at org.I0Itec.zkclient.ZkClient.connect(ZkClient.java:1231)
[info]   at org.I0Itec.zkclient.ZkClient.<init>(ZkClient.java:157)
[info]   at org.I0Itec.zkclient.ZkClient.<init>(ZkClient.java:131)
[info]   at kafka.utils.ZkUtils$.createZkClientAndConnection(ZkUtils.scala:93)
[info]   at kafka.utils.ZkUtils$.apply(ZkUtils.scala:75)
[info]   at 
org.apache.spark.sql.kafka010.KafkaTestUtils.setupEmbeddedZookeeper(KafkaTestUtils.scala:202)
[info]   at 
org.apache.spark.sql.kafka010.KafkaTestUtils.setup(KafkaTestUtils.scala:243)
[info]   at 
org.apache.spark.sql.kafka010.KafkaDelegationTokenSuite.beforeAll(KafkaDelegationTokenSuite.scala:49)
[info]   at 
org.scalatest.BeforeAndAfterAll.liftedTree1$1(BeforeAndAfterAll.scala:212)
[info]   at org.scalatest.BeforeAndAfterAll.run(BeforeAndAfterAll.scala:210)
[info]   at org.scalatest.BeforeAndAfterAll.run$(BeforeAndAfterAll.scala:208)
[info]   at org.apache.spark.SparkFunSuite.run(SparkFunSuite.scala:56)
[info]   at 
org.scalatest.tools.Framework.org$scalatest$tools$Framework$$runSuite(Framework.scala:314)
[info]   at 
org.scalatest.tools.Framework$ScalaTestTask.execute(Framework.scala:507)
[info]   at sbt.ForkMain$Run$2.call(ForkMain.java:296)
[info]   at sbt.ForkMain$Run$2.call(ForkMain.java:286)
[info]   at java.util.concurrent.FutureTask.run(FutureTask.java:266)
[info]   at 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
[info]   at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
[info]   at java.lang.Thread.run(Thread.java:748)
org.apache.directory.api.ldap.model.exception.LdapOperationErrorException: 
/home/koert/src/spark/target/tmp/spark-dc223dd0-e499-4ccf-9600-c70e4706a909/1568218986864/partitions/system/1.3.6.1.4.1.18060.0.4.1.2.50.lg
 (No such file or directory)
        at 
org.apache.directory.server.core.partition.impl.btree.AbstractBTreePartition.modify(AbstractBTreePartition.java:1183)
        at 
org.apache.directory.server.core.shared.partition.DefaultPartitionNexus.sync(DefaultPartitionNexus.java:335)
        at 
org.apache.directory.server.core.DefaultDirectoryService.shutdown(DefaultDirectoryService.java:1299)
        at 
org.apache.directory.server.core.DefaultDirectoryService$1.run(DefaultDirectoryService.java:1230)
        at java.lang.Thread.run(Thread.java:748)
Caused by: java.io.FileNotFoundException: 
/home/koert/src/spark/target/tmp/spark-dc223dd0-e499-4ccf-9600-c70e4706a909/1568218986864/partitions/system/1.3.6.1.4.1.18060.0.4.1.2.50.lg
 (No such file or directory)
        at java.io.FileOutputStream.open0(Native Method)
        at java.io.FileOutputStream.open(FileOutputStream.java:270)
        at java.io.FileOutputStream.<init>(FileOutputStream.java:213)
        at java.io.FileOutputStream.<init>(FileOutputStream.java:101)
        at jdbm.recman.TransactionManager.open(TransactionManager.java:209)
        at 
jdbm.recman.TransactionManager.synchronizeLogFromMemory(TransactionManager.java:202)
        at 
jdbm.recman.TransactionManager.synchronizeLog(TransactionManager.java:135)
        at 
org.apache.directory.server.core.partition.impl.btree.jdbm.JdbmIndex.sync(JdbmIndex.java:698)
        at 
org.apache.directory.server.core.partition.impl.btree.jdbm.JdbmPartition.sync(JdbmPartition.java:312)
        at 
org.apache.directory.server.core.partition.impl.btree.AbstractBTreePartition.modify(AbstractBTreePartition.java:1228)
        at 
org.apache.directory.server.core.partition.impl.btree.AbstractBTreePartition.modify(AbstractBTreePartition.java:1173)
        ... 4 more
java.io.FileNotFoundException: 
/home/koert/src/spark/target/tmp/spark-dc223dd0-e499-4ccf-9600-c70e4706a909/1568218986864/partitions/example/1.3.6.1.4.1.18060.0.4.1.2.5.lg
 (No such file or directory)
        at java.io.FileOutputStream.open0(Native Method)
        at java.io.FileOutputStream.open(FileOutputStream.java:270)
        at java.io.FileOutputStream.<init>(FileOutputStream.java:213)
        at java.io.FileOutputStream.<init>(FileOutputStream.java:101)
        at jdbm.recman.TransactionManager.open(TransactionManager.java:209)
        at 
jdbm.recman.TransactionManager.synchronizeLogFromMemory(TransactionManager.java:202)
        at 
jdbm.recman.TransactionManager.synchronizeLog(TransactionManager.java:135)
        at 
org.apache.directory.server.core.partition.impl.btree.jdbm.JdbmIndex.sync(JdbmIndex.java:698)
        at 
org.apache.directory.server.core.partition.impl.btree.jdbm.JdbmPartition.sync(JdbmPartition.java:312)
        at 
org.apache.directory.server.core.shared.partition.DefaultPartitionNexus.sync(DefaultPartitionNexus.java:353)
        at 
org.apache.directory.server.core.DefaultDirectoryService.shutdown(DefaultDirectoryService.java:1299)
        at 
org.apache.directory.server.core.DefaultDirectoryService$1.run(DefaultDirectoryService.java:1230)
        at java.lang.Thread.run(Thread.java:748)
java.io.FileNotFoundException: 
/home/koert/src/spark/target/tmp/spark-dc223dd0-e499-4ccf-9600-c70e4706a909/1568218986864/partitions/system/1.3.6.1.4.1.18060.0.4.1.2.5.lg
 (No such file or directory)
        at java.io.FileOutputStream.open0(Native Method)
        at java.io.FileOutputStream.open(FileOutputStream.java:270)
        at java.io.FileOutputStream.<init>(FileOutputStream.java:213)
        at java.io.FileOutputStream.<init>(FileOutputStream.java:101)
        at jdbm.recman.TransactionManager.open(TransactionManager.java:209)
        at 
jdbm.recman.TransactionManager.synchronizeLogFromMemory(TransactionManager.java:202)
        at 
jdbm.recman.TransactionManager.synchronizeLog(TransactionManager.java:135)
        at 
org.apache.directory.server.core.partition.impl.btree.jdbm.JdbmIndex.sync(JdbmIndex.java:698)
        at 
org.apache.directory.server.core.partition.impl.btree.jdbm.JdbmPartition.sync(JdbmPartition.java:312)
        at 
org.apache.directory.server.core.shared.partition.DefaultPartitionNexus.sync(DefaultPartitionNexus.java:353)
        at 
org.apache.directory.server.core.DefaultDirectoryService.shutdown(DefaultDirectoryService.java:1299)
        at 
org.apache.directory.server.core.DefaultDirectoryService$1.run(DefaultDirectoryService.java:1230)
        at java.lang.Thread.run(Thread.java:748)
[info] ScalaTest
[info] Run completed in 14 seconds, 392 milliseconds.
[info] Total number of tests run: 0
[info] Suites: completed 0, aborted 1
[info] Tests: succeeded 0, failed 0, canceled 0, ignored 0, pending 0
[info] *** 1 SUITE ABORTED ***
[error] Error: Total 1, Failed 0, Errors 1, Passed 0
[error] Error during tests:
[error]         org.apache.spark.sql.kafka010.KafkaDelegationTokenSuite
[error] (sql-kafka-0-10/test:testOnly) sbt.TestsFailedException: Tests 
unsuccessful
[error] Total time: 52 s, completed Sep 11, 2019 12:23:22 PM
{code}

> KafkaDelegationTokenSuite fails
> -------------------------------
>
>                 Key: SPARK-29027
>                 URL: https://issues.apache.org/jira/browse/SPARK-29027
>             Project: Spark
>          Issue Type: Bug
>          Components: Structured Streaming
>    Affects Versions: 3.0.0
>         Environment: {code}
> commit 6378d4bc06cd1bb1a209bd5fb63d10ef52d75eb4
> Author: Sean Owen <[email protected]>
> Date:   Mon Sep 9 10:19:40 2019 -0500
> {code}
> Ubuntu 16.04 with OpenJDK 1.8 (1.8.0_222-8u222-b10-1ubuntu1~16.04.1-b10)
>            Reporter: koert kuipers
>            Priority: Minor
>
> I am seeing a consistent failure of KafkaDelegationTokenSuite on master
> {code}
> JsonUtilsSuite:
> - parsing partitions
> - parsing partitionOffsets
> KafkaDelegationTokenSuite:
> javax.security.sasl.SaslException: Failure to initialize security context 
> [Caused by GSSException: No valid credentials provided (Mechanism level: 
> Failed to find any Kerberos credentails)]
>       at 
> com.sun.security.sasl.gsskerb.GssKrb5Server.<init>(GssKrb5Server.java:125)
>       at 
> com.sun.security.sasl.gsskerb.FactoryImpl.createSaslServer(FactoryImpl.java:85)
>       at javax.security.sasl.Sasl.createSaslServer(Sasl.java:524)
>       at 
> org.apache.zookeeper.server.ZooKeeperSaslServer$1.run(ZooKeeperSaslServer.java:118)
>       at 
> org.apache.zookeeper.server.ZooKeeperSaslServer$1.run(ZooKeeperSaslServer.java:114)
>       at java.security.AccessController.doPrivileged(Native Method)
>       at javax.security.auth.Subject.doAs(Subject.java:422)
>       at 
> org.apache.zookeeper.server.ZooKeeperSaslServer.createSaslServer(ZooKeeperSaslServer.java:114)
>       at 
> org.apache.zookeeper.server.ZooKeeperSaslServer.<init>(ZooKeeperSaslServer.java:48)
>       at 
> org.apache.zookeeper.server.NIOServerCnxn.<init>(NIOServerCnxn.java:100)
>       at 
> org.apache.zookeeper.server.NIOServerCnxnFactory.createConnection(NIOServerCnxnFactory.java:156)
>       at 
> org.apache.zookeeper.server.NIOServerCnxnFactory.run(NIOServerCnxnFactory.java:197)
>       at java.lang.Thread.run(Thread.java:748)
> Caused by: GSSException: No valid credentials provided (Mechanism level: 
> Failed to find any Kerberos credentails)
>       at 
> sun.security.jgss.krb5.Krb5AcceptCredential.getInstance(Krb5AcceptCredential.java:87)
>       at 
> sun.security.jgss.krb5.Krb5MechFactory.getCredentialElement(Krb5MechFactory.java:127)
>       at 
> sun.security.jgss.GSSManagerImpl.getCredentialElement(GSSManagerImpl.java:193)
>       at sun.security.jgss.GSSCredentialImpl.add(GSSCredentialImpl.java:427)
>       at sun.security.jgss.GSSCredentialImpl.<init>(GSSCredentialImpl.java:62)
>       at 
> sun.security.jgss.GSSManagerImpl.createCredential(GSSManagerImpl.java:154)
>       at 
> com.sun.security.sasl.gsskerb.GssKrb5Server.<init>(GssKrb5Server.java:108)
>       ... 12 more
> org.apache.spark.sql.kafka010.KafkaDelegationTokenSuite *** ABORTED ***
>   org.I0Itec.zkclient.exception.ZkAuthFailedException: Authentication failure
>   at org.I0Itec.zkclient.ZkClient.waitForKeeperState(ZkClient.java:947)
>   at org.I0Itec.zkclient.ZkClient.waitUntilConnected(ZkClient.java:924)
>   at org.I0Itec.zkclient.ZkClient.connect(ZkClient.java:1231)
>   at org.I0Itec.zkclient.ZkClient.<init>(ZkClient.java:157)
>   at org.I0Itec.zkclient.ZkClient.<init>(ZkClient.java:131)
>   at kafka.utils.ZkUtils$.createZkClientAndConnection(ZkUtils.scala:93)
>   at kafka.utils.ZkUtils$.apply(ZkUtils.scala:75)
>   at 
> org.apache.spark.sql.kafka010.KafkaTestUtils.setupEmbeddedZookeeper(KafkaTestUtils.scala:202)
>   at 
> org.apache.spark.sql.kafka010.KafkaTestUtils.setup(KafkaTestUtils.scala:243)
>   at 
> org.apache.spark.sql.kafka010.KafkaDelegationTokenSuite.beforeAll(KafkaDelegationTokenSuite.scala:49)
>   ...
> KafkaSourceOffsetSuite:
> - comparison {"t":{"0":1}} <=> {"t":{"0":2}}
> - comparison {"t":{"1":0,"0":1}} <=> {"t":{"1":1,"0":2}}
> - comparison {"t":{"0":1},"T":{"0":0}} <=> {"t":{"0":2},"T":{"0":1}}
> - comparison {"t":{"0":1}} <=> {"t":{"1":1,"0":2}}
> - comparison {"t":{"0":1}} <=> {"t":{"1":3,"0":2}}
> - basic serialization - deserialization
> - OffsetSeqLog serialization - deserialization
> - read Spark 2.1.0 offset format
> {code}
> {code}
> [INFO] Reactor Summary for Spark Project Parent POM 3.0.0-SNAPSHOT:
> [INFO] 
> [INFO] Spark Project Parent POM ........................... SUCCESS [  4.178 
> s]
> [INFO] Spark Project Tags ................................. SUCCESS [  9.373 
> s]
> [INFO] Spark Project Sketch ............................... SUCCESS [ 24.586 
> s]
> [INFO] Spark Project Local DB ............................. SUCCESS [  5.456 
> s]
> [INFO] Spark Project Networking ........................... SUCCESS [ 49.819 
> s]
> [INFO] Spark Project Shuffle Streaming Service ............ SUCCESS [  6.096 
> s]
> [INFO] Spark Project Unsafe ............................... SUCCESS [ 14.714 
> s]
> [INFO] Spark Project Launcher ............................. SUCCESS [  5.277 
> s]
> [INFO] Spark Project Core ................................. SUCCESS [32:58 
> min]
> [INFO] Spark Project ML Local Library ..................... SUCCESS [ 41.076 
> s]
> [INFO] Spark Project GraphX ............................... SUCCESS [01:51 
> min]
> [INFO] Spark Project Streaming ............................ SUCCESS [06:43 
> min]
> [INFO] Spark Project Catalyst ............................. SUCCESS [15:04 
> min]
> [INFO] Spark Project SQL .................................. SUCCESS [  01:32 
> h]
> [INFO] Spark Project ML Library ........................... SUCCESS [26:48 
> min]
> [INFO] Spark Project Tools ................................ SUCCESS [  7.830 
> s]
> [INFO] Spark Project Hive ................................. SUCCESS [  01:00 
> h]
> [INFO] Spark Project Graph API ............................ SUCCESS [  3.378 
> s]
> [INFO] Spark Project Cypher ............................... SUCCESS [  3.672 
> s]
> [INFO] Spark Project Graph ................................ SUCCESS [  3.615 
> s]
> [INFO] Spark Project REPL ................................. SUCCESS [02:11 
> min]
> [INFO] Spark Project Assembly ............................. SUCCESS [  3.058 
> s]
> [INFO] Kafka 0.10+ Token Provider for Streaming ........... SUCCESS [ 24.208 
> s]
> [INFO] Spark Integration for Kafka 0.10 ................... SUCCESS [01:46 
> min]
> [INFO] Kafka 0.10+ Source for Structured Streaming ........ FAILURE [06:15 
> min]
> [INFO] Spark Project Examples ............................. SKIPPED
> [INFO] Spark Integration for Kafka 0.10 Assembly .......... SUCCESS [  2.467 
> s]
> [INFO] Spark Avro ......................................... SUCCESS [01:47 
> min]
> [INFO] 
> ------------------------------------------------------------------------
> [INFO] BUILD FAILURE
> [INFO] 
> ------------------------------------------------------------------------
> [INFO] Total time:  04:11 h
> [INFO] Finished at: 2019-09-08T02:49:07-04:00
> [INFO] 
> ------------------------------------------------------------------------
> {code}



--
This message was sent by Atlassian Jira
(v8.3.2#803003)

---------------------------------------------------------------------
To unsubscribe, e-mail: [email protected]
For additional commands, e-mail: [email protected]

Reply via email to