[ https://issues.apache.org/jira/browse/HDFS-13562?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]
Anbang Hu updated HDFS-13562: ----------------------------- Resolution: Won't Fix Status: Resolved (was: Patch Available) The timeout is related to slow DNS resolution on Windows machines. Please see [this|https://issues.apache.org/jira/browse/HDFS-13569?focusedCommentId=16478265&page=com.atlassian.jira.plugin.system.issuetabpanels%3Acomment-tabpanel#comment-16478265] for more details. Marking this JIRA as Won't Fix. > TestPipelinesFailover times out on Windows > ------------------------------------------ > > Key: HDFS-13562 > URL: https://issues.apache.org/jira/browse/HDFS-13562 > Project: Hadoop HDFS > Issue Type: Bug > Reporter: Anbang Hu > Assignee: Anbang Hu > Priority: Major > Labels: Windows > Attachments: HDFS-13562.000.patch > > > testCompleteFileAfterCrashFailover times out, causing other tests to fail > because they cannot start cluster: > {color:#d04437}[INFO] Running > org.apache.hadoop.hdfs.server.namenode.ha.TestPipelinesFailover{color} > {color:#d04437}[ERROR] Tests run: 8, Failures: 0, Errors: 8, Skipped: 0, Time > elapsed: 30.813 s <<< FAILURE! 
- in > org.apache.hadoop.hdfs.server.namenode.ha.TestPipelinesFailover{color} > {color:#d04437}[ERROR] > testCompleteFileAfterCrashFailover(org.apache.hadoop.hdfs.server.namenode.ha.TestPipelinesFailover) > Time elapsed: 30.009 s <<< ERROR!{color} > {color:#d04437}java.lang.Exception: test timed out after 30000 > milliseconds{color} > {color:#d04437} at java.net.Inet4AddressImpl.getHostByAddr(Native > Method){color} > {color:#d04437} at > java.net.InetAddress$2.getHostByAddr(InetAddress.java:932){color} > {color:#d04437} at > java.net.InetAddress.getHostFromNameService(InetAddress.java:617){color} > {color:#d04437} at > java.net.InetAddress.getCanonicalHostName(InetAddress.java:588){color} > {color:#d04437} at > org.apache.hadoop.security.SecurityUtil.getLocalHostName(SecurityUtil.java:256){color} > {color:#d04437} at > org.apache.hadoop.security.SecurityUtil.replacePattern(SecurityUtil.java:224){color} > {color:#d04437} at > org.apache.hadoop.security.SecurityUtil.getServerPrincipal(SecurityUtil.java:179){color} > {color:#d04437} at > org.apache.hadoop.security.AuthenticationFilterInitializer.getFilterConfigMap(AuthenticationFilterInitializer.java:90){color} > {color:#d04437} at > org.apache.hadoop.http.HttpServer2.getFilterProperties(HttpServer2.java:521){color} > {color:#d04437} at > org.apache.hadoop.http.HttpServer2.constructSecretProvider(HttpServer2.java:511){color} > {color:#d04437} at > org.apache.hadoop.http.HttpServer2.<init>(HttpServer2.java:400){color} > {color:#d04437} at > org.apache.hadoop.http.HttpServer2.<init>(HttpServer2.java:115){color} > {color:#d04437} at > org.apache.hadoop.http.HttpServer2$Builder.build(HttpServer2.java:336){color} > {color:#d04437} at > org.apache.hadoop.hdfs.server.datanode.web.DatanodeHttpServer.<init>(DatanodeHttpServer.java:131){color} > {color:#d04437} at > org.apache.hadoop.hdfs.server.datanode.DataNode.startInfoServer(DataNode.java:962){color} > {color:#d04437} at > 
org.apache.hadoop.hdfs.server.datanode.DataNode.startDataNode(DataNode.java:1370){color} > {color:#d04437} at > org.apache.hadoop.hdfs.server.datanode.DataNode.<init>(DataNode.java:495){color} > {color:#d04437} at > org.apache.hadoop.hdfs.server.datanode.DataNode.makeInstance(DataNode.java:2695){color} > {color:#d04437} at > org.apache.hadoop.hdfs.server.datanode.DataNode.instantiateDataNode(DataNode.java:2598){color} > {color:#d04437} at > org.apache.hadoop.hdfs.MiniDFSCluster.startDataNodes(MiniDFSCluster.java:1554){color} > {color:#d04437} at > org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:904){color} > {color:#d04437} at > org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:514){color} > {color:#d04437} at > org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:473){color} > {color:#d04437} at > org.apache.hadoop.hdfs.server.namenode.ha.TestPipelinesFailover.doWriteOverFailoverTest(TestPipelinesFailover.java:143){color} > {color:#d04437} at > org.apache.hadoop.hdfs.server.namenode.ha.TestPipelinesFailover.testCompleteFileAfterCrashFailover(TestPipelinesFailover.java:128){color} > {color:#d04437} at sun.reflect.NativeMethodAccessorImpl.invoke0(Native > Method){color} > {color:#d04437} at > sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62){color} > {color:#d04437} at > sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43){color} > {color:#d04437} at java.lang.reflect.Method.invoke(Method.java:498){color} > {color:#d04437} at > org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47){color} > {color:#d04437} at > org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12){color} > {color:#d04437} at > org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44){color} > {color:#d04437} at > 
org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17){color} > {color:#d04437} at > org.junit.internal.runners.statements.FailOnTimeout$StatementThread.run(FailOnTimeout.java:74){color} > {color:#d04437}[ERROR] > testWriteOverGracefulFailoverWithDnFail(org.apache.hadoop.hdfs.server.namenode.ha.TestPipelinesFailover) > Time elapsed: 0.055 s <<< ERROR!{color} > {color:#d04437}java.io.IOException: Could not fully delete > D:\OSS\hadoop-branch-2\hadoop-hdfs-project\hadoop-hdfs\target\test\data\dfs\name1{color} > {color:#d04437} at > org.apache.hadoop.hdfs.MiniDFSCluster.createNameNodesAndSetConf(MiniDFSCluster.java:1047){color} > {color:#d04437} at > org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:883){color} > {color:#d04437} at > org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:514){color} > {color:#d04437} at > org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:473){color} > {color:#d04437} at > org.apache.hadoop.hdfs.server.namenode.ha.TestPipelinesFailover.doTestWriteOverFailoverWithDnFail(TestPipelinesFailover.java:221){color} > {color:#d04437} at > org.apache.hadoop.hdfs.server.namenode.ha.TestPipelinesFailover.testWriteOverGracefulFailoverWithDnFail(TestPipelinesFailover.java:203){color} > {color:#d04437} at sun.reflect.NativeMethodAccessorImpl.invoke0(Native > Method){color} > {color:#d04437} at > sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62){color} > {color:#d04437} at > sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43){color} > {color:#d04437} at java.lang.reflect.Method.invoke(Method.java:498){color} > {color:#d04437} at > org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47){color} > {color:#d04437} at > org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12){color} > {color:#d04437} at > 
org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44){color} > {color:#d04437} at > org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17){color} > {color:#d04437} at > org.junit.internal.runners.statements.FailOnTimeout$StatementThread.run(FailOnTimeout.java:74){color} > {color:#d04437}...{color} > > {color:#d04437}[INFO]{color} > {color:#d04437}[INFO] Results:{color} > {color:#d04437}[INFO]{color} > {color:#d04437}[ERROR] Errors:{color} > {color:#d04437}[ERROR] > TestPipelinesFailover.testAllocateBlockAfterCrashFailover:122->doWriteOverFailoverTest:143 > ╗ IO{color} > {color:#d04437}[ERROR] > TestPipelinesFailover.testCompleteFileAfterCrashFailover:128->doWriteOverFailoverTest:143 > ╗{color} > {color:#d04437}[ERROR] > TestPipelinesFailover.testFailoverRightBeforeCommitSynchronization:338 ╗ IO > Co...{color} > {color:#d04437}[ERROR] > TestPipelinesFailover.testLeaseRecoveryAfterFailover:283 ╗ IO Could not fully > ...{color} > {color:#d04437}[ERROR] TestPipelinesFailover.testPipelineRecoveryStress:455 ╗ > IO Could not fully dele...{color} > {color:#d04437}[ERROR] > TestPipelinesFailover.testWriteOverCrashFailoverWithDnFail:208->doTestWriteOverFailoverWithDnFail:221 > ╗ IO{color} > {color:#d04437}[ERROR] > TestPipelinesFailover.testWriteOverGracefulFailover:116->doWriteOverFailoverTest:143 > ╗ IO{color} > {color:#d04437}[ERROR] > TestPipelinesFailover.testWriteOverGracefulFailoverWithDnFail:203->doTestWriteOverFailoverWithDnFail:221 > ╗ IO{color} > {color:#d04437}[INFO]{color} > {color:#d04437}[ERROR] Tests run: 8, Failures: 0, Errors: 8, Skipped: 0{color} -- This message was sent by Atlassian JIRA (v7.6.3#76005) --------------------------------------------------------------------- To unsubscribe, e-mail: hdfs-issues-unsubscr...@hadoop.apache.org For additional commands, e-mail: hdfs-issues-h...@hadoop.apache.org